diff --git a/.backportrc.json b/.backportrc.json
index 03f3f892f9227..20287f0bfc0e6 100644
--- a/.backportrc.json
+++ b/.backportrc.json
@@ -1,10 +1,10 @@
 {
   "upstream" : "elastic/elasticsearch",
-  "targetBranchChoices" : [ "main", "8.x", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
+  "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
   "targetPRLabels" : [ "backport" ],
   "branchLabelMapping" : {
     "^v9.0.0$" : "main",
-    "^v8.17.0$" : "8.x",
+    "^v8.18.0$" : "8.x",
     "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2"
   }
 }
diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index 19e99852869e6..5be5990cfb203 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -56,7 +56,7 @@ steps:
     timeout_in_minutes: 300
     matrix:
       setup:
-        BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml
index 081d059460653..1a1e46d55f7a4 100644
--- a/.buildkite/pipelines/periodic-packaging.template.yml
+++ b/.buildkite/pipelines/periodic-packaging.template.yml
@@ -8,6 +8,7 @@ steps:
       setup:
         image:
           - debian-11
+          - debian-12
           - opensuse-leap-15
           - oraclelinux-7
           - oraclelinux-8
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index 7dd8269f4ffe6..a49e486176484 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -9,6 +9,7 @@ steps:
       setup:
         image:
           - debian-11
+          - debian-12
           - opensuse-leap-15
           - oraclelinux-7
           - oraclelinux-8
@@ -288,8 +289,8 @@ steps:
     env:
       BWC_VERSION: 8.15.4

-  - label: "{{matrix.image}} / 8.16.1 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.1
+  - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2
     timeout_in_minutes: 300
     matrix:
       setup:
@@ -302,7 +303,7 @@ steps:
       machineType: custom-16-32768
       buildDirectory: /dev/shm/bk
     env:
-      BWC_VERSION: 8.16.1
+      BWC_VERSION: 8.16.2

   - label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade"
     command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0
@@ -320,6 +321,22 @@ steps:
     env:
       BWC_VERSION: 8.17.0

+  - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0
+    timeout_in_minutes: 300
+    matrix:
+      setup:
+        image:
+          - rocky-8
+          - ubuntu-2004
+    agents:
+      provider: gcp
+      image: family/elasticsearch-{{matrix.image}}
+      machineType: custom-16-32768
+      buildDirectory: /dev/shm/bk
+    env:
+      BWC_VERSION: 8.18.0
+
   - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade"
     command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0
     timeout_in_minutes: 300
diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml
index f9f75488f0917..79e5a2e8dcdbb 100644
--- a/.buildkite/pipelines/periodic-platform-support.yml
+++ b/.buildkite/pipelines/periodic-platform-support.yml
@@ -8,6 +8,7 @@ steps:
       setup:
         image:
           - debian-11
+          - debian-12
           - opensuse-leap-15
           - oraclelinux-7
           - oraclelinux-8
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index 79371d6ddccf5..aa1db893df8cc 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -306,8 +306,8 @@ steps:
         - signal_reason: agent_stop
           limit: 3

-  - label: 8.16.1 / bwc
-    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.1#bwcTest
+  - label: 8.16.2 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.2#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
@@ -316,7 +316,7 @@ steps:
       buildDirectory: /dev/shm/bk
       preemptible: true
     env:
-      BWC_VERSION: 8.16.1
+      BWC_VERSION: 8.16.2
     retry:
       automatic:
         - exit_status: "-1"
@@ -344,6 +344,25 @@ steps:
         - signal_reason: agent_stop
          limit: 3

+  - label: 8.18.0 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.0#bwcTest
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
+      preemptible: true
+    env:
+      BWC_VERSION: 8.18.0
+    retry:
+      automatic:
+        - exit_status: "-1"
+          limit: 3
+          signal_reason: none
+        - signal_reason: agent_stop
+          limit: 3
+
   - label: 9.0.0 / bwc
     command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest
     timeout_in_minutes: 300
@@ -429,7 +448,7 @@ steps:
       setup:
         ES_RUNTIME_JAVA:
           - openjdk21
-        BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -471,7 +490,7 @@ steps:
         ES_RUNTIME_JAVA:
           - openjdk21
           - openjdk23
-        BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
index 04ccc41891b3b..ffc1350aceab3 100644
--- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
+++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
@@ -3,68 +3,15 @@ config:
 steps:
   - group: packaging-tests-unix
     steps:
-      - label: "{{matrix.image}} / docker / packaging-tests-unix"
-        key: "packaging-tests-unix-docker"
-        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess
-        timeout_in_minutes: 300
-        matrix:
-          setup:
-            image:
-              - debian-11
-              - opensuse-leap-15
-              - oraclelinux-7
-              - oraclelinux-8
-              - sles-12
-              - sles-15
-              - ubuntu-1804
-              - ubuntu-2004
-              - ubuntu-2204
-              - rocky-8
-              - rocky-9
-              - rhel-7
-              - rhel-8
-              - rhel-9
-              - almalinux-8
-        agents:
-          provider: gcp
-          image: family/elasticsearch-{{matrix.image}}
-          diskSizeGb: 350
-          machineType: custom-16-32768
-      - label: "{{matrix.image}} / packages / packaging-tests-unix"
-        key: "packaging-tests-unix-packages"
-        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.packages
-        timeout_in_minutes: 300
-        matrix:
-          setup:
-            image:
-              - debian-11
-              - opensuse-leap-15
-              - oraclelinux-7
-              - oraclelinux-8
-              - sles-12
-              - sles-15
-              - ubuntu-1804
-              - ubuntu-2004
-              - ubuntu-2204
-              - rocky-8
-              - rocky-9
-              - rhel-7
-              - rhel-8
-              - rhel-9
-              - almalinux-8
-        agents:
-          provider: gcp
-          image: family/elasticsearch-{{matrix.image}}
-          diskSizeGb: 350
-          machineType: custom-16-32768
-      - label: "{{matrix.image}} / archives / packaging-tests-unix"
-        key: "packaging-tests-unix-archives"
-        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.archives
+      - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix"
+        key: "packaging-tests-unix"
+        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.{{matrix.PACKAGING_TASK}}
         timeout_in_minutes: 300
         matrix:
           setup:
             image:
               - debian-11
+              - debian-12
               - opensuse-leap-15
               - oraclelinux-7
               - oraclelinux-8
@@ -79,6 +26,11 @@ steps:
               - rhel-8
               - rhel-9
               - almalinux-8
+            PACKAGING_TASK:
+              - docker
+              - docker-cloud-ess
+              - packages
+              - archives
         agents:
           provider: gcp
           image: family/elasticsearch-{{matrix.image}}
diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh
index 81b8225e443a4..f2dc40ca1927f 100755
--- a/.buildkite/scripts/dra-workflow.sh
+++ b/.buildkite/scripts/dra-workflow.sh
@@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}"
 BRANCH="${BUILDKITE_BRANCH:-}"

 # Don't publish main branch to staging
-if [[ "$BRANCH" == "main" && "$WORKFLOW" == "staging" ]]; then
+if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then
   exit 0
 fi

diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 85522e47a523f..a8d6dda4fb0c2 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -15,6 +15,7 @@ BWC_VERSION:
  - "8.13.4"
  - "8.14.3"
  - "8.15.4"
- - "8.16.1"
+ - "8.16.2"
  - "8.17.0"
+ - "8.18.0"
  - "9.0.0"
diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions
index 9ea3072021bb3..5514fc376a285 100644
--- a/.ci/snapshotBwcVersions
+++ b/.ci/snapshotBwcVersions
@@ -1,4 +1,5 @@
 BWC_VERSION:
- - "8.16.1"
+ - "8.16.2"
  - "8.17.0"
+ - "8.18.0"
  - "9.0.0"
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java
index 3790be5f279d1..d44586ef4901a 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java
@@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.plugins.PluginsLoader;
 import org.elasticsearch.plugins.PluginsService;
 import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.script.DocReader;
@@ -76,8 +77,7 @@ public class ScriptScoreBenchmark {
     private final PluginsService pluginsService = new PluginsService(
         Settings.EMPTY,
         null,
-        null,
-        Path.of(System.getProperty("plugins.dir"))
+        new PluginsLoader(null, Path.of(System.getProperty("plugins.dir")))
     );

     private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList());
diff --git a/branches.json b/branches.json
index e81d511a88458..0e23a795664dd 100644
--- a/branches.json
+++ b/branches.json
@@ -7,6 +7,9 @@
   {
     "branch": "8.16"
   },
+  {
+    "branch": "8.17"
+  },
   {
     "branch": "8.x"
   },
diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java
index d19a1d492d9ed..c3124812e5089 100644
--- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java
+++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java
@@ -17,11 +17,9 @@
 import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
 import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin;
 import org.elasticsearch.gradle.internal.conventions.util.Util;
-import org.gradle.api.Action;
 import org.gradle.api.NamedDomainObjectSet;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
-import org.gradle.api.Task;
 import org.gradle.api.XmlProvider;
 import org.gradle.api.file.ProjectLayout;
 import org.gradle.api.plugins.BasePlugin;
@@ -69,6 +67,7 @@ public void apply(Project project) {
         configureSourcesJar(project);
         configurePomGeneration(project);
         configurePublications(project);
+        formatGeneratedPom(project);
     }

     private void configurePublications(Project project) {
@@ -127,42 +126,27 @@ private void configurePomGeneration(Project project) {
                     projectVersion.get()
                 )
             );
-            pomTask.doFirst(t -> pomTask.getPom().withXml(xml -> formatDependencies(xml)));
         });

         var publishing = extensions.getByType(PublishingExtension.class);
         final var mavenPublications = publishing.getPublications().withType(MavenPublication.class);
-        addNameAndDescriptionToPom(project, mavenPublications);

         mavenPublications.configureEach(publication -> {
-            // Add git origin info to generated POM files for internal builds
-            publication.getPom().withXml(xml -> addScmInfo(xml, gitInfo.get()));
+            publication.getPom().withXml(xml -> {
+                // Add git origin info to generated POM files for internal builds
+                addScmInfo(xml, gitInfo.get());
+            });
             // have to defer this until archivesBaseName is set
             project.afterEvaluate(p -> publication.setArtifactId(archivesBaseName.get()));
             generatePomTask.configure(t -> t.dependsOn(generateMavenPoms));
         });
     }

-    /**
-     * just ensure we put dependencies to the end. more a cosmetic thing than anything else
-     * */
-    private void formatDependencies(XmlProvider xml) {
-        Element rootElement = xml.asElement();
-        var dependencies = rootElement.getElementsByTagName("dependencies");
-        if (dependencies.getLength() == 1 && dependencies.item(0) != null) {
-            org.w3c.dom.Node item = dependencies.item(0);
-            rootElement.removeChild(item);
-            rootElement.appendChild(item);
-        }
-    }
-
     private void addNameAndDescriptionToPom(Project project, NamedDomainObjectSet<MavenPublication> mavenPublications) {
         var name = project.getName();
         var description = providerFactory.provider(() -> project.getDescription() != null ? project.getDescription() : "");
         mavenPublications.configureEach(p -> p.getPom().withXml(xml -> {
             var root = xml.asNode();
-            // Node versionNode = root.get("version");
-            // versionNode.plus(1, "name", name);
             root.appendNode("name", name);
             root.appendNode("description", description.get());
         }));
@@ -209,4 +193,32 @@ static void configureSourcesJar(Project project) {
             project.getTasks().named(BasePlugin.ASSEMBLE_TASK_NAME).configure(t -> t.dependsOn(sourcesJarTask));
         });
     }
+
+
+    /**
+     * Format the generated pom files to be in a sort of reproducible order.
+     */
+    private void formatGeneratedPom(Project project) {
+        var publishing = project.getExtensions().getByType(PublishingExtension.class);
+        final var mavenPublications = publishing.getPublications().withType(MavenPublication.class);
+        mavenPublications.configureEach(publication -> {
+            publication.getPom().withXml(xml -> {
+                // Add some pom formatting
+                formatDependencies(xml);
+            });
+        });
+    }
+
+    /**
+     * just ensure we put dependencies to the end. more a cosmetic thing than anything else
+     * */
+    private void formatDependencies(XmlProvider xml) {
+        Element rootElement = xml.asElement();
+        var dependencies = rootElement.getElementsByTagName("dependencies");
+        if (dependencies.getLength() == 1 && dependencies.item(0) != null) {
+            org.w3c.dom.Node item = dependencies.item(0);
+            rootElement.removeChild(item);
+            rootElement.appendChild(item);
+        }
+    }
 }
diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
index 6acc1431eaec1..22286c90de3d1 100644
--- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
+++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
@@ -1,7 +1,7 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
+distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
index 03b044583add0..63bb732d8a11d 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
@@ -119,7 +119,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
            noticeFile.set(file("NOTICE"))
         """
         when:
-        def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
+        def result = gradleRunner("assemble", "-x", "generateClusterFeaturesMetadata").build()
         then:
         result.task(":assemble").outcome == TaskOutcome.SUCCESS
         file("build/distributions/hello-world.jar").exists()
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy
index 9fc6aa7276b2d..36a43c4b739b6 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy
@@ -20,9 +20,6 @@ class ElasticsearchJavaPluginFuncTest extends AbstractGradleInternalPluginFuncTe
         when:
         buildFile.text << """
             import org.elasticsearch.gradle.Architecture
-            import org.elasticsearch.gradle.internal.info.BuildParams
-            BuildParams.init { it.setMinimumRuntimeVersion(JavaVersion.VERSION_1_10) }
-
             assert tasks.named('compileJava').get().sourceCompatibility == JavaVersion.VERSION_1_10.toString()
             assert tasks.named('compileJava').get().targetCompatibility == JavaVersion.VERSION_1_10.toString()
         """
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
index 94df02b280ca6..a4635a7232754 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
@@ -9,7 +9,6 @@

 package org.elasticsearch.gradle.internal

-
 import spock.lang.Unroll

 import com.github.tomakehurst.wiremock.WireMockServer
@@ -24,8 +23,7 @@ import java.nio.file.Paths
 import java.util.regex.Matcher
 import java.util.regex.Pattern

-import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_ADOPTIUM
-import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_OPENJDK
+import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.*

 class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {

@@ -33,13 +31,11 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
     private static final String ADOPT_JDK_VERSION = "12.0.2+10"
     private static final String ADOPT_JDK_VERSION_11 = "11.0.10+9"
     private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7"
+    private static final String AZUL_JDK_VERSION_8 = "8u302+b08"
+    private static final String AZUL_8_DISTRO_VERSION = "8.56.0.23"
     private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde"
     private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)")

-    def setup() {
-        configurationCacheCompatible = false
-    }
-
     @Unroll
     def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() {
         given:
@@ -56,14 +52,16 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
                 version = '$jdkVersion'
                 platform = "$platform"
                 architecture = '$arch'
+                distributionVersion = '$distributionVersion'
             }
         }

-        def theJdks = jdks
+//        def theJdks = jdks
         tasks.register("getJdk") {
             dependsOn jdks.myJdk
+            def jdk = jdks.myJdk
             doLast {
-                println "JDK HOME: " + theJdks.myJdk
+                println "JDK HOME: " + jdk
             }
         }
        """
@@ -78,22 +76,23 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
         assertExtraction(result.output, expectedJavaBin);

         where:
-        platform  | arch      | jdkVendor       | jdkVersion           | expectedJavaBin          | suffix
-        "linux"   | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | "bin/java"               | ""
-        "linux"   | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | "bin/java"               | ""
-        "linux"   | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | "bin/java"               | "(old version)"
-        "windows" | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | "bin/java"               | ""
-        "windows" | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | "bin/java"               | ""
-        "windows" | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | "bin/java"               | "(old version)"
-        "darwin"  | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | "Contents/Home/bin/java" | ""
-        "darwin"  | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | "Contents/Home/bin/java" | ""
-        "darwin"  | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | "Contents/Home/bin/java" | "(old version)"
-        "mac"     | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | "Contents/Home/bin/java" | ""
-        "mac"     | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | "Contents/Home/bin/java" | "(old version)"
-        "darwin"  | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | "Contents/Home/bin/java" | ""
-        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | "bin/java"               | ""
-        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | "bin/java"               | "(jdk 11)"
-        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | "bin/java"               | "(jdk 15)"
+        platform  | arch      | jdkVendor       | jdkVersion           | distributionVersion   | expectedJavaBin          | suffix
+        "linux"   | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | null                  | "bin/java"               | ""
+        "linux"   | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | null                  | "bin/java"               | ""
+        "linux"   | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | null                  | "bin/java"               | "(old version)"
+        "windows" | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | null                  | "bin/java"               | ""
+        "windows" | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | null                  | "bin/java"               | ""
+        "windows" | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | null                  | "bin/java"               | "(old version)"
+        "darwin"  | "x64"     | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | null                  | "Contents/Home/bin/java" | ""
+        "darwin"  | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | null                  | "Contents/Home/bin/java" | ""
+        "darwin"  | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | null                  | "Contents/Home/bin/java" | "(old version)"
+        "mac"     | "x64"     | VENDOR_OPENJDK  | OPEN_JDK_VERSION     | null                  | "Contents/Home/bin/java" | ""
+        "mac"     | "x64"     | VENDOR_OPENJDK  | OPENJDK_VERSION_OLD  | null                  | "Contents/Home/bin/java" | "(old version)"
+        "darwin"  | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | null                  | "Contents/Home/bin/java" | ""
+        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION    | null                  | "bin/java"               | ""
+        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | null                  | "bin/java"               | "(jdk 11)"
+        "linux"   | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | null                  | "bin/java"               | "(jdk 15)"
+        "darwin"  | "aarch64" | VENDOR_ZULU     | AZUL_JDK_VERSION_8   | AZUL_8_DISTRO_VERSION | "Contents/Home/bin/java" | "(jdk 8)"
     }

     def "transforms are reused across projects"() {
         given:
@@ -118,9 +117,10 @@
             }
         }
         tasks.register("getJdk") {
-            dependsOn jdks.myJdk
+            def jdk = jdks.myJdk
+            dependsOn jdk
             doLast {
-                println "JDK HOME: " + jdks.myJdk
+                println "JDK HOME: " + jdk
             }
         }
        """
@@ -137,7 +137,7 @@
             result.output.count("Unpacking linux-12.0.2-x64.tar.gz using ${SymbolicLinkPreservingUntarTransform.simpleName}") == 1

         where:
-        platform | jdkVendor       | jdkVersion        | expectedJavaBin
+        platform | jdkVendor       | jdkVersion        | expectedJavaBin
         "linux"  | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java"
     }

@@ -159,6 +159,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
                 vendor = '$VENDOR_ADOPTIUM'
                 version = '$ADOPT_JDK_VERSION'
                 platform = "$platform"
+                distributionVersion = '$ADOPT_JDK_VERSION'
                 architecture = "x64"
             }
         }
@@ -204,6 +205,8 @@
         assert matcher.find() == true;
         String jdkHome = matcher.group(1);
         Path javaPath = Paths.get(jdkHome, javaBin);
+        println "canonical " + javaPath.toFile().getCanonicalPath()
+        Paths.get(jdkHome).toFile().listFiles().each { println it }
         assert Files.exists(javaPath) == true;
         true
     }
@@ -221,15 +224,26 @@
             final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99";
             final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform);
             return "/java/GA/" + versionPath + "/GPL/" + filename;
+        } else if (vendor.equals(VENDOR_ZULU)) {
+            // we only have a single version of zulu currently in the tests
+            return "/zulu/bin/zulu8.56.0.23-ca-jdk8.0.302-macosx_aarch64.tar.gz"
         }
     }

     private static byte[] filebytes(final String vendor, final String platform) throws IOException {
         final String effectivePlatform = getPlatform(vendor, platform);
         if (vendor.equals(VENDOR_ADOPTIUM)) {
-            return JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_adoptium_" + effectivePlatform + "." + extension(platform)).getBytes()
+            return JdkDownloadPluginFuncTest.class.getResourceAsStream(
+                "fake_adoptium_" + effectivePlatform + "." + extension(platform)
+            ).getBytes()
         } else if (vendor.equals(VENDOR_OPENJDK)) {
-            JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_openjdk_" + effectivePlatform + "." + extension(platform)).getBytes()
+            return JdkDownloadPluginFuncTest.class.getResourceAsStream(
+                "fake_openjdk_" + effectivePlatform + "." + extension(platform)
+            ).getBytes()
+        } else {
+            // zulu
+            String resourcePath = "fake_zulu_" + effectivePlatform + "." + extension(platform)
+            return JdkDownloadPluginFuncTest.class.getResourceAsStream(resourcePath).getBytes()
         }
     }
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
index c7e11ba96c7dd..a199ff9d3eac5 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
@@ -303,7 +303,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
         """

         when:
-        def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()
+        def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()

         then:
         result.task(":generatePom").outcome == TaskOutcome.SUCCESS
diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz
new file mode 100644
index 0000000000000..87361b67ec76c
Binary files /dev/null and b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz differ
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
index 2748411750a5a..847eda7a355c0 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
@@ -12,10 +12,14 @@ import java.time.LocalDateTime;

 import org.elasticsearch.gradle.Architecture
 import org.elasticsearch.gradle.OS
+import static org.elasticsearch.gradle.internal.util.CiUtils.safeName

 import java.lang.management.ManagementFactory
 import java.time.LocalDateTime

+// Resolving this early to avoid issues with the build scan plugin in combination with the configuration cache usage
+def taskNames = gradle.startParameter.taskNames.join(' ')
+
 develocity {

   buildScan {
@@ -110,7 +114,7 @@ develocity {

         // Add a build annotation
         // See: https://buildkite.com/docs/agent/v3/cli-annotate
-        def body = """
-${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + def body = """
+${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${taskNames}
""" def process = [ 'buildkite-agent', 'annotate', @@ -131,7 +135,3 @@ develocity { } } } - -static def safeName(String string) { - return string.replaceAll(/[^a-zA-Z0-9_\-\.]+/, ' ').trim().replaceAll(' ', '_').toLowerCase() -} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 567812c740817..14e2323b4d14d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -9,28 +9,26 @@ import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.rest.RestTestBasePlugin import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClustersAware -import org.elasticsearch.gradle.testclusters.TestDistribution //apply plugin: org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin // Common config when running with a FIPS-140 runtime JVM if (buildParams.inFipsJvm) { allprojects { - String javaSecurityFilename = buildParams.runtimeJavaDetails.toLowerCase().contains('oracle') ? 'fips_java_oracle.security' : 'fips_java.security' + String javaSecurityFilename = buildParams.runtimeJavaDetails.get().toLowerCase().contains('oracle') ? 'fips_java_oracle.security' : 'fips_java.security' File fipsResourcesDir = new File(project.buildDir, 'fips-resources') File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks') - def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.4') - def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17') + def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5') + def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19') def manualDebug = false; //change this to manually debug bouncy castle in an IDE if(manualDebug) { - bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.4') - bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17'){ + bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5') + bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){ exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 5640409e0ff44..9237c3ae8918c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.util.Pair import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.TestUtil import org.jetbrains.gradle.ext.JUnit @@ -145,7 +144,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { doLast { ['main', 'test'].each { sourceSet -> - modifyXml(".idea/modules/libs/native/elasticsearch.libs.${project.project(':libs:native').name}.${sourceSet}.iml") { xml -> + modifyXml(".idea/modules/libs/native/elasticsearch.libs.native.${sourceSet}.iml") { xml -> xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW' } } diff --git 
a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle index 27b490329f8cb..3f506ae954df8 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle @@ -3,7 +3,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.internal.JarApiComparisonTask -import org.elasticsearch.gradle.internal.info.BuildParams import static org.elasticsearch.gradle.internal.InternalDistributionBwcSetupPlugin.buildBwcTaskName diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy index ad37fa9f02c8c..6c87149095186 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy @@ -15,16 +15,12 @@ import org.elasticsearch.gradle.internal.test.AntFixture import org.gradle.api.file.FileSystemOperations import org.gradle.api.file.ProjectLayout import org.gradle.api.provider.ProviderFactory -import org.gradle.api.tasks.Internal import org.gradle.process.ExecOperations import javax.inject.Inject abstract class AntFixtureStop extends LoggedExec implements FixtureStop { - @Internal - AntFixture fixture - @Inject AntFixtureStop(ProjectLayout projectLayout, ExecOperations execOperations, @@ -34,12 +30,12 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } void setFixture(AntFixture fixture) { - assert this.fixture == null - this.fixture = fixture; - final Object pid = "${-> this.fixture.pid}" - onlyIf("pidFile exists") { fixture.pidFile.exists() } + def pidFile = fixture.pidFile + def fixtureName = fixture.name + final Object pid = "${-> Integer.parseInt(pidFile.getText('UTF-8').trim())}" + onlyIf("pidFile exists") { pidFile.exists() } doFirst { - logger.info("Shutting down ${fixture.name} with pid ${pid}") + logger.info("Shutting down ${fixtureName} with pid ${pid}") } if (OS.current() == OS.WINDOWS) { @@ -51,9 +47,8 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } doLast { fileSystemOperations.delete { - it.delete(fixture.pidFile) + it.delete(pidFile) } } - this.fixture = fixture } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy index 81f21f8c62d86..01a3bdaee2337 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy @@ -29,11 +29,6 @@ import java.nio.charset.Charset */ public abstract class AntTask extends DefaultTask { - /** - * A buffer that will contain the output of the ant code run, - * if the output was not already written directly to stdout. 
-     */
-    public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream()

     @Inject
     protected FileSystemOperations getFileSystemOperations() {
@@ -57,6 +52,11 @@ public abstract class AntTask extends DefaultTask {
         // otherwise groovy replaces System.out, and you have no chance to debug
         // ant.saveStreams = false

+        /**
+         * A buffer that will contain the output of the ant code run,
+         * if the output was not already written directly to stdout.
+         */
+        ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream()
         final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO
         final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name())
diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy
index f2837ff40fb79..88a68f1194858 100644
--- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy
+++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy
@@ -10,22 +10,37 @@
 package org.elasticsearch.gradle.internal.test

 import org.elasticsearch.gradle.OS
+
 import org.elasticsearch.gradle.internal.AntFixtureStop
 import org.elasticsearch.gradle.internal.AntTask
+import org.elasticsearch.gradle.testclusters.TestClusterInfo
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
 import org.gradle.api.GradleException
+import org.gradle.api.file.ProjectLayout
+import org.gradle.api.provider.Property
+import org.gradle.api.provider.Provider
+import org.gradle.api.provider.ProviderFactory
+import org.gradle.api.provider.ValueSource
+import org.gradle.api.provider.ValueSourceParameters
+import org.gradle.api.tasks.Input
 import org.gradle.api.tasks.Internal
 import org.gradle.api.tasks.TaskProvider

+import javax.inject.Inject
+
 /**
  * A fixture for integration tests which runs in a separate process launched by Ant.
  */
-class AntFixture extends AntTask implements Fixture {
+class AntFixture extends AntTask {

     /** The path to the executable that starts the fixture. */
     @Internal
     String executable

     private final List arguments = new ArrayList<>()
+    private ProjectLayout projectLayout
+    private final ProviderFactory providerFactory

     void args(Object... args) {
         arguments.addAll(args)
@@ -69,19 +84,14 @@ class AntFixture extends AntTask implements Fixture {
         return tmpFile.exists()
     }

-    private final TaskProvider stopTask
-
-    AntFixture() {
-        stopTask = createStopTask()
+    @Inject
+    AntFixture(ProjectLayout projectLayout, ProviderFactory providerFactory) {
+        this.providerFactory = providerFactory
+        this.projectLayout = projectLayout;
+        TaskProvider stopTask = createStopTask()
         finalizedBy(stopTask)
     }

-    @Override
-    @Internal
-    TaskProvider getStopTask() {
-        return stopTask
-    }
-
     @Override
     protected void runAnt(AntBuilder ant) {
         // reset everything
@@ -231,7 +241,7 @@
      */
     @Internal
     protected File getBaseDir() {
-        return new File(project.buildDir, "fixtures/${name}")
+        return new File(projectLayout.getBuildDirectory().getAsFile().get(), "fixtures/${name}")
     }

     /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */
@@ -242,7 +252,7 @@

     /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */
     @Internal
-    protected File getPidFile() {
+    File getPidFile() {
         return new File(baseDir, 'pid')
     }

@@ -264,6 +274,12 @@ class AntFixture extends AntTask {
         return portsFile.readLines("UTF-8").get(0)
     }

+    @Internal
+    Provider<String> getAddressAndPortProvider() {
+        File thePortFile = portsFile
+        return providerFactory.provider(() -> thePortFile.readLines("UTF-8").get(0))
+    }
+
     /** Returns a file that wraps around the actual command when {@code spawn == true}. */
     @Internal
     protected File getWrapperScript() {
@@ -281,4 +297,22 @@ class AntFixture extends AntTask {
     protected File getRunLog() {
         return new File(cwd, 'run.log')
     }
+
+    @Internal
+    Provider<String> getAddressAndPortSource() {
+        return providerFactory.of(AntFixtureValueSource.class, spec -> {
+            spec.getParameters().getPortFile().set(portsFile);
+        });
+    }
+
+    static abstract class AntFixtureValueSource implements ValueSource<String, AntFixtureValueSource.Parameters> {
+        @Override
+        String obtain() {
+            return getParameters().getPortFile().map { it.readLines("UTF-8").get(0) }.get()
+        }
+
+        interface Parameters extends ValueSourceParameters {
+            Property<File> getPortFile();
+        }
+    }
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
index 49887dac5b6fd..2b79bc2b9173e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
@@ -14,7 +14,7 @@
 import org.elasticsearch.gradle.internal.conventions.util.Util;
 import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
 import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
@@ -38,7 +38,7 @@ public void apply(Project project) {
         project.getPluginManager().apply(PluginBuildPlugin.class);
         project.getPluginManager().apply(JarHellPrecommitPlugin.class);
         project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
-        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
+        project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
         boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi();
         // Clear default dependencies added by public PluginBuildPlugin as we add our
         // own project dependencies for internal builds
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
index 75984e1bc6998..fb8a9858e24d5 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
@@ -12,7 +12,7 @@
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
 import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
 import org.gradle.api.InvalidUserDataException;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
@@ -63,7 +63,7 @@ public void apply(final Project project) {
         project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
         project.getPluginManager().apply(DependenciesInfoPlugin.class);
         project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
-        project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
+        project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
         InternalPrecommitTasks.create(project, true);
         configureLicenseAndNotice(project);
     }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
index 3e0a47a8f453c..bf901fef90450 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
@@ -13,10 +13,8 @@
  * This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
  */
 public enum DockerBase {
-    DEFAULT("ubuntu:20.04", "", "apt-get"),
-
     // "latest" here is intentional, since the image name specifies "8"
-    UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi", "microdnf"),
+    DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"),

     // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build
     IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"),
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java
index 7d9537feaea56..14baa55794c95 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java
@@ -147,12 +147,17 @@ interface Parameters extends FlowParameters {
         @SuppressWarnings("checkstyle:DescendantToken")
         @Override
         public void execute(BuildFinishedFlowAction.Parameters parameters) throws FileNotFoundException {
+            List<File> filesToArchive = parameters.getFilteredFiles().get();
+            if (filesToArchive.isEmpty()) {
+                return;
+            }
             File uploadFile = parameters.getUploadFile().get();
             if (uploadFile.exists()) {
                 getFileSystemOperations().delete(spec -> spec.delete(uploadFile));
             }
             uploadFile.getParentFile().mkdirs();
-            createBuildArchiveTar(parameters.getFilteredFiles().get(), parameters.getProjectDir().get(), uploadFile);
+
+            createBuildArchiveTar(filesToArchive, parameters.getProjectDir().get(), uploadFile);
             if (uploadFile.exists() && "true".equals(System.getenv("BUILDKITE"))) {
                 String uploadFilePath = uploadFile.getName();
                 File uploadFileDir = uploadFile.getParentFile();
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java
index 42a44edd7f9a5..aca310cbf1e47 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java
@@ -82,12 +82,15 @@ private void configureJavadocForConfiguration(Project project, boolean shadow, C
             .sorted(Comparator.comparing(Dependency::getGroup))
             .filter(d -> d instanceof ProjectDependency)
             .map(d -> (ProjectDependency) d)
-            .filter(p -> p.getDependencyProject() != null)
             .forEach(projectDependency -> configureDependency(project, shadow, projectDependency));
     }

     private void configureDependency(Project project, boolean shadowed, ProjectDependency dep) {
-        var upstreamProject = dep.getDependencyProject();
+        // we should use variant aware dependency management to resolve artifacts required for javadoc here
+        Project upstreamProject = project.project(dep.getPath());
+        if (upstreamProject == null) {
+            return;
+        }
         if (shadowed) {
             /*
              * Include the source of shadowed upstream projects so we don't
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
index 60699522cdc3f..ec694de8ec597 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
@@ -173,9 +173,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu
         if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER) {
             return projectName + "docker" + archString + "-export";
         }
-        if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_UBI) {
-            return projectName + "ubi-docker" + archString + "-export";
-        }
         if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_IRONBANK) {
             return projectName + "ironbank-docker" + archString + "-export";
         }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java
index c70d421939d6d..4396a18c205c1 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java
@@ -11,6 +11,7 @@

 import org.gradle.api.Buildable;
 import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.file.FileCollection;
 import org.gradle.api.model.ObjectFactory;
 import org.gradle.api.provider.Property;
 import org.gradle.api.tasks.TaskDependency;
@@ -33,13 +34,14 @@ public class Jdk implements Buildable, Iterable<File> {
     private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?");

     private final String name;
-    private final Configuration configuration;
+    private final FileCollection configuration;

     private final Property<String> vendor;
     private final Property<String> version;
     private final Property<String> platform;
     private final Property<String> architecture;
     private final Property<String> distributionVersion;
+    private final String configurationName;
     private String baseVersion;
     private String major;
     private String build;
@@ -47,6 +49,7 @@ public class Jdk implements Buildable, Iterable<File> {

     Jdk(String name, Configuration configuration, ObjectFactory objectFactory) {
         this.name = name;
+        this.configurationName = configuration.getName();
         this.configuration = configuration;
         this.vendor = objectFactory.property(String.class);
         this.version = objectFactory.property(String.class);
@@ -137,7 +140,7 @@ public String getPath() {
     }

     public String getConfigurationName() {
-        return configuration.getName();
+        return configurationName;
     }

     @Override
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
index 5b195cad3388f..3c278128e43f2 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
@@ -21,8 +21,6 @@
 import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
 import org.gradle.api.attributes.Attribute;

-import java.util.Arrays;
-
 /**
  * @deprecated We wanna get rid from this and custom jdk downloads via this plugin and
  * make leverage the gradle toolchain resolver capabilities.
@@ -38,8 +36,8 @@ public class JdkDownloadPlugin implements Plugin<Project> {
     private static final String REPO_NAME_PREFIX = "jdk_repo_";
     private static final String EXTENSION_NAME = "jdks";

-    public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.+).jdk";
-    public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*linux_aarch64";
+    public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.*).jdk";
+    public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*_aarch64";

     @Override
     public void apply(Project project) {
@@ -66,7 +64,8 @@ public void apply(Project project) {
                 .attribute(jdkAttribute, true);
             transformSpec.parameters(parameters -> {
                 parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX);
-                parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN));
+                // parameters.setAsFiletreeOutput(true);
+                // parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN));
             });
         });

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
index d1585120b0803..7c488e6e73fee 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
@@ -21,6 +21,7 @@
 import org.gradle.api.plugins.JavaPluginExtension;
 import org.gradle.api.tasks.SourceSet;
 import org.gradle.api.tasks.SourceSetContainer;
+import org.gradle.api.tasks.TaskProvider;
 import org.gradle.api.tasks.compile.CompileOptions;
 import org.gradle.api.tasks.compile.JavaCompile;
 import org.gradle.api.tasks.javadoc.Javadoc;
@@ -87,6 +88,7 @@ public void apply(Project project) {
                 String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion;
                 SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion);
                 configureSourceSetInJar(project, mainSourceSet, javaVersion);
+                addJar(project, mainSourceSet, javaVersion);
                 mainSourceSets.add(mainSourceSetName);
                 testSourceSets.add(mainSourceSetName);

@@ -147,6 +149,14 @@ private SourceSet addSourceSet(
         return sourceSet;
     }

+    private void addJar(Project project, SourceSet sourceSet, int javaVersion) {
+        project.getConfigurations().register("java" + javaVersion);
+        TaskProvider<Jar> jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> {
+            task.from(sourceSet.getOutput());
+        });
+        project.getArtifacts().add("java" + javaVersion, jarTask);
+    }
+
     private void configurePreviewFeatures(Project project, SourceSet sourceSet, int javaVersion) {
         project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> {
             CompileOptions compileOptions = compileTask.getOptions();
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
index 8f0951da86b88..28776f03d17e8 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
@@ -17,7 +17,6 @@ public class InternalElasticsearchDistributionTypes {
     public static ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType();
     public static ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType();
     public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType();
-    public static ElasticsearchDistributionType DOCKER_UBI = new DockerUbiElasticsearchDistributionType();
     public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
     public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
     public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
@@ -26,7 +25,6 @@ public class InternalElasticsearchDistributionTypes {
         DEB,
         RPM,
         DOCKER,
-        DOCKER_UBI,
         DOCKER_IRONBANK,
         DOCKER_CLOUD_ESS,
         DOCKER_WOLFI
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java
deleted file mode 100644
index ea8aeda8fc099..0000000000000
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-package org.elasticsearch.gradle.internal.info;
-
-import java.lang.reflect.Modifier;
-import java.util.Arrays;
-import java.util.function.Consumer;
-
-@Deprecated
-public class BuildParams {
-    private static Boolean isCi;
-
-    /**
-     * Initialize global build parameters. This method accepts and a initialization function which in turn accepts a
-     * {@link MutableBuildParams}. Initialization can be done in "stages", therefore changes override existing values, and values from
-     * previous calls to {@link #init(Consumer)} carry forward. In cases where you want to clear existing values
-     * {@link MutableBuildParams#reset()} may be used.
-     *
-     * @param initializer Build parameter initializer
-     */
-    public static void init(Consumer<MutableBuildParams> initializer) {
-        initializer.accept(MutableBuildParams.INSTANCE);
-    }
-
-    public static Boolean isCi() {
-        return value(isCi);
-    }
-
-    private static <T> T value(T object) {
-        if (object == null) {
-            String callingMethod = Thread.currentThread().getStackTrace()[2].getMethodName();
-
-            throw new IllegalStateException(
-                "Build parameter '"
-                    + propertyName(callingMethod)
-                    + "' has not been initialized.\n"
-                    + "Perhaps the plugin responsible for initializing this property has not been applied."
-            );
-        }
-
-        return object;
-    }
-
-    private static String propertyName(String methodName) {
-        String propertyName = methodName.startsWith("is") ? methodName.substring("is".length()) : methodName.substring("get".length());
-        return propertyName.substring(0, 1).toLowerCase() + propertyName.substring(1);
-    }
-
-    public static class MutableBuildParams {
-        private static MutableBuildParams INSTANCE = new MutableBuildParams();
-
-        private MutableBuildParams() {}
-
-        /**
-         * Resets any existing values from previous initializations.
-         */
-        public void reset() {
-            Arrays.stream(BuildParams.class.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> {
-                try {
-                    // Since we are mutating private static fields from a public static inner class we need to suppress
-                    // accessibility controls here.
-                    f.setAccessible(true);
-                    f.set(null, null);
-                } catch (IllegalAccessException e) {
-                    throw new RuntimeException(e);
-                }
-            });
-        }
-
-        public void setIsCi(boolean isCi) {
-            BuildParams.isCi = isCi;
-        }
-    }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
index 761b0601a1c24..0535026b2594e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
@@ -152,13 +152,6 @@ public void apply(Project project) {
             spec.getParameters().getBuildParams().set(buildParams);
         });

-        BuildParams.init(params -> {
-            params.reset();
-            params.setIsCi(
-                System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null
-            );
-        });
-
         // Enforce the minimum compiler version
         assertMinimumCompilerVersion(minimumCompilerVersion);

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java
similarity index 83%
rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java
rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java
index be972f11d4586..0c8a99fa82398 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java
@@ -21,10 +21,10 @@
 import java.util.Map;

 /**
- * Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
+ * Extracts cluster feature metadata into a machine-readable format for use in backward compatibility testing. */ -public class HistoricalFeaturesMetadataPlugin implements Plugin { - public static final String HISTORICAL_FEATURES_JSON = "historical-features.json"; +public class ClusterFeaturesMetadataPlugin implements Plugin { + public static final String CLUSTER_FEATURES_JSON = "cluster-features.json"; public static final String FEATURES_METADATA_TYPE = "features-metadata-json"; public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata"; @@ -40,13 +40,13 @@ public void apply(Project project) { SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); - TaskProvider generateTask = project.getTasks() - .register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> { + TaskProvider generateTask = project.getTasks() + .register("generateClusterFeaturesMetadata", ClusterFeaturesMetadataTask.class, task -> { task.setClasspath( featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath()) .plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME)) ); - task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON)); + task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(CLUSTER_FEATURES_JSON)); }); Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java similarity index 81% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java index a2ea7af210dfd..aa4f90e4d2367 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java @@ -26,7 +26,7 @@ import javax.inject.Inject; @CacheableTask -public abstract class HistoricalFeaturesMetadataTask extends DefaultTask { +public abstract class ClusterFeaturesMetadataTask extends DefaultTask { private FileCollection classpath; @OutputFile @@ -46,30 +46,30 @@ public void setClasspath(FileCollection classpath) { @TaskAction public void execute() { - getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> { + getWorkerExecutor().noIsolation().submit(ClusterFeaturesMetadataWorkAction.class, params -> { params.getClasspath().setFrom(getClasspath()); params.getOutputFile().set(getOutputFile()); }); } - public interface HistoricalFeaturesWorkParameters extends WorkParameters { + public interface ClusterFeaturesWorkParameters extends WorkParameters { ConfigurableFileCollection getClasspath(); RegularFileProperty getOutputFile(); } - public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction { + public abstract static class ClusterFeaturesMetadataWorkAction implements WorkAction { private final ExecOperations execOperations; @Inject - public 
HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) { + public ClusterFeaturesMetadataWorkAction(ExecOperations execOperations) { this.execOperations = execOperations; } @Override public void execute() { LoggedExec.javaexec(execOperations, spec -> { - spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor"); + spec.getMainClass().set("org.elasticsearch.extractor.features.ClusterFeaturesMetadataExtractor"); spec.classpath(getParameters().getClasspath()); spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath()); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index e8d2bbd93ff20..211718c151ba9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -51,7 +51,6 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; -import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; @@ -148,7 +147,6 @@ public void apply(Project project) { private static Map> lifecycleTasks(Project project, String taskPrefix) { Map> lifecyleTasks = new HashMap<>(); lifecyleTasks.put(DOCKER, project.getTasks().register(taskPrefix + ".docker")); - lifecyleTasks.put(DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi")); lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank")); lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess")); lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi")); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java index 487fe012a5941..a2851bfa2ae55 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java @@ -10,11 +10,14 @@ package org.elasticsearch.gradle.internal.test; import org.apache.commons.lang.StringUtils; -import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; import org.gradle.api.plugins.ExtraPropertiesExtension; import 
org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSetContainer; @@ -45,23 +48,31 @@ public void apply(final Project project) { Configuration testImplementationConfig = project.getConfigurations().getByName("testImplementation"); testImplementationConfig.getDependencies().all(dep -> { - if (dep instanceof ProjectDependency - && ((ProjectDependency) dep).getDependencyProject().getPlugins().hasPlugin(PluginBuildPlugin.class)) { - project.getGradle() - .projectsEvaluated(gradle -> addPluginResources(project, ((ProjectDependency) dep).getDependencyProject())); + if (dep instanceof ProjectDependency && dep.getGroup().contains("plugin")) { + addPluginResources(project, ((ProjectDependency) dep)); } }); } - private static void addPluginResources(final Project project, final Project pluginProject) { - final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + pluginProject.getName()); - String camelProjectName = stream(pluginProject.getName().split("-")).map(t -> StringUtils.capitalize(t)) + private static void addPluginResources(final Project project, final ProjectDependency projectDependency) { + final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + projectDependency.getName()); + String camelProjectName = stream(projectDependency.getName().split("-")).map(t -> StringUtils.capitalize(t)) .collect(Collectors.joining()); String taskName = "copy" + camelProjectName + "Metadata"; + String metadataConfiguration = "resolved" + camelProjectName + "Metadata"; + Configuration pluginMetadata = project.getConfigurations().maybeCreate(metadataConfiguration); + pluginMetadata.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + pluginMetadata.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + DependencyHandler dependencyHandler = project.getDependencies(); + Dependency pluginMetadataDependency = dependencyHandler.project(Map.of("path", projectDependency.getPath())); + dependencyHandler.add(metadataConfiguration, pluginMetadataDependency); project.getTasks().register(taskName, Copy.class, copy -> { copy.into(outputDir); - copy.from(pluginProject.getTasks().named("pluginProperties")); - copy.from(pluginProject.file("src/main/plugin-metadata")); + copy.from(pluginMetadata); }); Map map = Map.of("builtBy", taskName); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 548791b9496c2..559c0f60abc08 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -20,8 +20,8 @@ import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; +import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin; import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; -import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin; import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import 
org.elasticsearch.gradle.plugin.PluginPropertiesExtension; @@ -43,6 +43,7 @@ import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; +import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.ClasspathNormalizer; import org.gradle.api.tasks.PathSensitivity; @@ -115,12 +116,12 @@ public void apply(Project project) { extractedPluginsConfiguration.extendsFrom(pluginsConfiguration); configureArtifactTransforms(project); - // Create configuration for aggregating historical feature metadata + // Create configuration for aggregating feature metadata FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> { c.setCanBeConsumed(false); c.setCanBeResolved(true); c.attributes( - a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) + a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) ); c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server")))); c.withDependencies(dependencies -> { @@ -135,10 +136,7 @@ public void apply(Project project) { c.setCanBeConsumed(false); c.setCanBeResolved(true); c.attributes( - a -> a.attribute( - ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, - HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE - ) + a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) ); c.defaultDependencies( d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata"))) @@ -251,7 +249,7 @@ private void copyDependencies(Project project, DependencySet dependencies, Confi configuration.getDependencies() .stream() .filter(d -> d instanceof ProjectDependency) - .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath()))) + .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependencyInternal) d).getPath()))) .forEach(dependencies::add); } @@ -328,8 +326,9 @@ private Configuration createPluginConfiguration(Project project, String name, bo Collection additionalDependencies = new LinkedHashSet<>(); for (Iterator iterator = dependencies.iterator(); iterator.hasNext();) { Dependency dependency = iterator.next(); + // this logic of relying on other projects metadata should probably live in a build service if (dependency instanceof ProjectDependency projectDependency) { - Project dependencyProject = projectDependency.getDependencyProject(); + Project dependencyProject = project.project(projectDependency.getPath()); List extendedPlugins = dependencyProject.getExtensions() .getByType(PluginPropertiesExtension.class) .getExtendedPlugins(); @@ -339,8 +338,8 @@ private Configuration createPluginConfiguration(Project project, String name, bo iterator.remove(); additionalDependencies.add( useExploded - ? getExplodedBundleDependency(project, dependencyProject.getPath()) - : getBundleZipTaskDependency(project, dependencyProject.getPath()) + ? 
getExplodedBundleDependency(project, projectDependency.getPath()) + : getBundleZipTaskDependency(project, projectDependency.getPath()) ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index 61dea47eb15c1..ca669276123b3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.Directory; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RelativePath; import org.gradle.api.internal.file.FileOperations; @@ -244,10 +245,11 @@ public void apply(Project project) { yamlRestCompatTestTask.configure(testTask -> { testTask.systemProperty("tests.restCompat", true); // Use test runner and classpath from "normal" yaml source set + FileCollection outputFileCollection = yamlCompatTestSourceSet.getOutput(); testTask.setTestClassesDirs( yamlTestSourceSet.getOutput().getClassesDirs().plus(yamlCompatTestSourceSet.getOutput().getClassesDirs()) ); - testTask.onlyIf("Compatibility tests are available", t -> yamlCompatTestSourceSet.getOutput().isEmpty() == false); + testTask.onlyIf("Compatibility tests are available", t -> outputFileCollection.isEmpty() == false); testTask.setClasspath( yamlCompatTestSourceSet.getRuntimeClasspath() // remove the "normal" api and tests diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index ef93dafa913cd..ba242a8e23861 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -137,7 +137,7 @@ public void skipTest(String fullTestName, String reason) { // However, the folder can be arbitrarily nest so, a == a1/a2/a3, and the test name can include forward slashes, so c == c1/c2/c3 // So we also need to support a1/a2/a3/b/c1/c2/c3 - String[] testParts = fullTestName.split("/"); + String[] testParts = fullTestName.split("/", 3); if (testParts.length < 3) { throw new IllegalArgumentException( "To skip tests, all 3 parts [folder/file/test name] must be defined. 
found [" + fullTestName + "]" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java index ac458a632e818..0c6a6bc26156b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java @@ -34,6 +34,7 @@ static String toArchString(Architecture architecture) { case X86_64 -> "x64"; case AARCH64 -> "aarch64"; case X86 -> "x86"; + default -> throw new UnsupportedOperationException("Architecture " + architecture); }; } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java similarity index 57% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java index f7ee88c715dfa..1b019a6cbd3e6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java @@ -7,15 +7,12 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.gradle.internal.test; +package org.elasticsearch.gradle.internal.util; -/** - * Any object that can produce an accompanying stop task, meant to tear down - * a previously instantiated service. - */ -public interface Fixture { +public class CiUtils { - /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */ - Object getStopTask(); + static String safeName(String input) { + return input.replaceAll("[^a-zA-Z0-9_\\-\\.]+", " ").trim().replaceAll(" ", "_").toLowerCase(); + } } diff --git a/build-tools-internal/src/main/resources/checkstyle.xml b/build-tools-internal/src/main/resources/checkstyle.xml index daedc2ac3c629..9ed31d993909e 100644 --- a/build-tools-internal/src/main/resources/checkstyle.xml +++ b/build-tools-internal/src/main/resources/checkstyle.xml @@ -57,7 +57,7 @@ unfair. 
--> - + diff --git a/build-tools-internal/src/main/resources/fips_java.policy b/build-tools-internal/src/main/resources/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/build-tools-internal/src/main/resources/fips_java.policy +++ b/build-tools-internal/src/main/resources/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index a9da7995c2b36..68b97050ea012 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -167,3 +167,5 @@ org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#(java.lang.Str @defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug java.lang.Thread#(java.lang.Runnable) java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) + +org.elasticsearch.common.bytes.BytesReference#copyBytes(org.elasticsearch.common.bytes.BytesReference) @ This method is subject to removal. Copying bytes is prone to performance regressions and unnecessary allocations.
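The `minimumGradleVersion` resource bumped in the next hunk is a one-line file recording the oldest Gradle release the build accepts. A minimal sketch of how a build plugin can enforce such a floor with Gradle's public `GradleVersion` API follows; the resource path comes from this diff, but the class name and wiring are hypothetical, not the actual Elasticsearch implementation.

```java
import org.gradle.api.GradleException;
import org.gradle.util.GradleVersion;

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;

// Hypothetical sketch: read the bundled minimumGradleVersion resource and fail
// fast when the running Gradle release is older than the recorded minimum.
public final class MinimumGradleVersionCheck {

    public static void assertMinimumGradleVersion() {
        String minimum;
        try (InputStream in = MinimumGradleVersionCheck.class.getResourceAsStream("/minimumGradleVersion")) {
            if (in == null) {
                throw new IllegalStateException("minimumGradleVersion resource not found on the classpath");
            }
            minimum = new String(in.readAllBytes(), StandardCharsets.UTF_8).trim();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        // GradleVersion implements Comparable, so a plain compareTo expresses "too old".
        if (GradleVersion.current().compareTo(GradleVersion.version(minimum)) < 0) {
            throw new GradleException("Gradle " + minimum + " or newer is required; this build is running " + GradleVersion.current());
        }
    }
}
```

Note the `.trim()`: the resource deliberately has no trailing newline (hence the `\ No newline at end of file` markers in the hunk below), but trimming keeps the check robust either way.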
diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index dd78a707858a7..876e3136ea819 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.10.2 \ No newline at end of file +8.11.1 \ No newline at end of file diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index c5b7a44a19d31..9c7d20d84a670 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -17,232 +17,201 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo class BwcVersionsSpec extends Specification { List versionLines = [] - def "current version is next major with last minor staged"() { - given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.16.2', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.0.0') - - when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) - def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } - - then: - unreleased == [ - (v('7.16.2')): new UnreleasedVersionInfo(v('7.16.2'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') - ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.minimumWireCompatibleVersion == v('7.17.0') - } - def "current version is next minor with next major and last minor both staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 
'8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] } def "current is next minor with upcoming minor staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:staged'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] } def "current version is staged major"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', 
'9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] } def "current version is major with unreleased next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } def "current version is major with staged next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') 
+ addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.15.2')): new UnreleasedVersionInfo(v('7.15.2'), '7.15', ':distribution:bwc:bugfix'), - (v('7.16.0')): new UnreleasedVersionInfo(v('7.16.0'), '7.16', ':distribution:bwc:staged'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] } def "current version is next bugfix"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.1')) + def bwc = new BwcVersions(versionLines, v('9.0.1')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] } def "current version is next minor with no staged releases"() { given: - addVersion('7.14.0', '8.9.0') 
- addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), '8.0', ':distribution:bwc:bugfix'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } private void addVersion(String elasticsearch, String lucene) { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java index 2f5b110fc59a9..c3da389fc30d4 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -39,6 +39,12 @@ public void write(byte b[], int off, int len) throws IOException { bootstrap(); delegate.write(b, off, len); } + + @Override + public void write(byte b[]) throws IOException { + bootstrap(); + delegate.write(b); + } }; } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java index b3a792b418384..42e576012c0c9 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java @@ -24,6 +24,8 @@ import org.gradle.api.Task; import org.gradle.api.Transformer; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; import org.gradle.api.file.CopySpec; import org.gradle.api.file.FileCollection; import 
org.gradle.api.file.RegularFile; @@ -126,9 +128,27 @@ private TaskProvider createBundleTasks(final Project project, PluginPropert // know about the plugin (used by test security code to statically initialize the plugin in unit tests) var testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test"); Map map = Map.of("builtBy", buildProperties); - testSourceSet.getOutput().dir(map, new File(project.getBuildDir(), "generated-resources")); + + File generatedResources = new File(project.getBuildDir(), "generated-resources"); + testSourceSet.getOutput().dir(map, generatedResources); testSourceSet.getResources().srcDir(pluginMetadata); + // expose the plugin properties and metadata for other plugins to use in their tests. + // See TestWithDependenciesPlugin for how this is used. + project.getConfigurations().create("pluginMetadata", conf -> { + conf.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + conf.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + }); + + project.getArtifacts().add("pluginMetadata", new File(project.getBuildDir(), "generated-descriptor"), artifact -> { + artifact.builtBy(buildProperties); + }); + project.getArtifacts().add("pluginMetadata", pluginMetadata); + // getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "plugin-metadata"); var bundleSpec = createBundleSpec(project, pluginMetadata, buildProperties); extension.setBundleSpec(bundleSpec); // create the actual bundle task, which zips up all the files for the plugin diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index ec341ecfd8b79..77393fe16b4c2 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -76,6 +76,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final LinkedHashMap> waitConditions = new LinkedHashMap<>(); private final transient Project project; private final Provider reaper; + private final Provider testClustersRegistryProvider; private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -87,11 +88,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private boolean shared = false; + private int claims = 0; + public ElasticsearchCluster( String path, String clusterName, Project project, Provider reaper, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -104,6 +108,7 @@ public ElasticsearchCluster( this.clusterName = clusterName; this.project = project; this.reaper = reaper; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -120,6 +125,7 @@ public ElasticsearchCluster( clusterName + "-0", project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -177,6 +183,7 @@ public void setNumberOfNodes(int numberOfNodes) { clusterName + "-" + i, project, reaper, + 
testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -408,6 +415,7 @@ public void setPreserveDataDir(boolean preserveDataDir) { public void freeze() { nodes.forEach(ElasticsearchNode::freeze); configurationFrozen.set(true); + nodes.whenObjectAdded(node -> { throw new IllegalStateException("Cannot add nodes to test cluster after it has been frozen"); }); } private void checkFrozen() { @@ -663,4 +671,11 @@ public String toString() { return "cluster{" + path + ":" + clusterName + "}"; } + int addClaim() { + return ++this.claims; + } + + int removeClaim() { + return --this.claims; + } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index df11733928f0f..90162591cfcef 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -124,6 +124,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final String name; transient private final Project project; private final Provider reaperServiceProvider; + private final Provider testClustersRegistryProvider; + private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -164,7 +166,6 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final List distributions = new ArrayList<>(); private int currentDistro = 0; private TestDistribution testDistribution; - private volatile Process esProcess; private Function nameCustomization = s -> s; private boolean isWorkingDirConfigured = false; private String httpPort = "0"; @@ -179,6 +180,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { String name, Project project, Provider reaperServiceProvider, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -191,6 +193,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.name = name; this.project = project; this.reaperServiceProvider = reaperServiceProvider; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -892,11 +895,13 @@ private void startElasticsearchProcess() { } } LOGGER.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment); + Process esProcess; try { esProcess = processBuilder.start(); } catch (IOException e) { throw new TestClustersException("Failed to start ES process for " + this, e); } + testClustersRegistryProvider.get().storeProcess(id(), esProcess); reaperServiceProvider.get().registerPid(toString(), esProcess.pid()); } @@ -982,6 +987,7 @@ public synchronized void stop(boolean tailLogs) { } catch (IOException e) { throw new UncheckedIOException(e); } + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); if (esProcess == null && tailLogs) { // This is a special case. If start() throws an exception the plugin will still call stop // Another exception here would eat the original.
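The `ElasticsearchNode` hunks above remove the `volatile Process esProcess` field and instead park each node's live process in the shared `TestClustersRegistry` build service, keyed by node id, so the DSL model objects carry no unserializable runtime state once Gradle's configuration cache serializes them. A stripped-down stand-in for that pattern, assuming only Gradle's shared build service API and not the real registry class:

```java
import org.gradle.api.services.BuildService;
import org.gradle.api.services.BuildServiceParameters;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Stand-in sketch: a shared build service owns the live Process handles, so model
// objects only keep a Provider to the service instead of holding the process itself.
public abstract class ProcessHandleService implements BuildService<BuildServiceParameters.None> {

    private final Map<String, Process> processes = new ConcurrentHashMap<>();

    // Called when a node starts its process, mirroring storeProcess(id, esProcess) above.
    public void storeProcess(String id, Process process) {
        processes.put(id, process);
    }

    // Called from stop()/isProcessAlive(), mirroring getProcess(id) above.
    public Process getProcess(String id) {
        return processes.get(id);
    }
}
```

Registration would mirror the plugin's own `registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop())` call, with every node receiving the same service `Provider` at construction time and only calling `get()` on it at execution time.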
@@ -1574,6 +1580,7 @@ public List getFeatureFlags() { @Override @Internal public boolean isProcessAlive() { + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?"); return esProcess.isAlive(); } @@ -1602,6 +1609,10 @@ public int hashCode() { @Override public String toString() { + return id() + " (" + System.identityHashCode(this) + ")"; + } + + private String id() { return "node{" + path + ":" + name + "}"; } @@ -1702,7 +1713,7 @@ public CharSequence[] getArgs() { } } - private record FeatureFlag(String feature, Version from, Version until) { + public record FeatureFlag(String feature, Version from, Version until) { @Input public String getFeature() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java new file mode 100644 index 0000000000000..07663de7a9df9 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.gradle.testclusters; + +import java.io.File; +import java.util.List; + +public class TestClusterInfo { + private final List allHttpSocketURI; + private final List allTransportPortURI; + private final List auditLogs; + + public TestClusterInfo(List allHttpSocketURI, List allTransportPortURI, List auditLogs) { + this.allHttpSocketURI = allHttpSocketURI; + this.allTransportPortURI = allTransportPortURI; + this.auditLogs = auditLogs; + } + + public List getAllHttpSocketURI() { + return allHttpSocketURI; + } + + public List getAllTransportPortURI() { + return allTransportPortURI; + } + + public List getAuditLogs() { + return auditLogs; + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java new file mode 100644 index 0000000000000..8ecadcdc6d2b1 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.jetbrains.annotations.Nullable; + +public abstract class TestClusterValueSource implements ValueSource { + + @Nullable + @Override + public TestClusterInfo obtain() { + String clusterName = getParameters().getClusterName().get(); + String path = getParameters().getPath().get(); + return getParameters().getService().get().getClusterDetails(path, clusterName); + } + + interface Parameters extends ValueSourceParameters { + Property getClusterName(); + + Property getPath(); + + Property getService(); + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java index f84aa2a0389c2..9e5fc1f09ac9e 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.gradle.testclusters; +import org.elasticsearch.gradle.ElasticsearchDistribution; import org.gradle.api.Task; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -34,10 +35,15 @@ default void useCluster(ElasticsearchCluster cluster) { if (cluster.getPath().equals(getProject().getPath()) == false) { throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster); } - - cluster.getNodes() - .all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze())))); - dependsOn(cluster.getPluginAndModuleConfigurations()); + if (cluster.getName().equals(getName())) { + for (ElasticsearchNode node : cluster.getNodes()) { + for (ElasticsearchDistribution distro : node.getDistributions()) { + ElasticsearchDistribution frozenDistro = distro.maybeFreeze(); + dependsOn(frozenDistro); + } + } + dependsOn(cluster.getPluginAndModuleConfigurations()); + } getClusters().add(cluster); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 301782d52d1a3..ada31bc11a653 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.invocation.Gradle; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; @@ -106,15 +107,22 @@ public void apply(Project project) { runtimeJavaProvider = providerFactory.provider( () -> System.getenv("RUNTIME_JAVA_HOME") == null ? 
Jvm.current().getJavaHome() : new File(System.getenv("RUNTIME_JAVA_HOME")) ); + + // register cluster registry as a global build service + Provider testClustersRegistryProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); + // enable the DSL to describe clusters - NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaperServiceProvider); + NamedDomainObjectContainer container = createTestClustersContainerExtension( + project, + testClustersRegistryProvider, + reaperServiceProvider + ); // provide a task to be able to list defined clusters. createListClustersTask(project, container); - // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); - // register throttle so we only run at most max-workers/2 nodes concurrently Provider testClustersThrottleProvider = project.getGradle() .getSharedServices() @@ -145,6 +153,7 @@ private void configureArtifactTransforms(Project project) { private NamedDomainObjectContainer createTestClustersContainerExtension( Project project, + Provider testClustersRegistryProvider, Provider reaper ) { // Create an extensions that allows describing clusters @@ -155,6 +164,7 @@ private NamedDomainObjectContainer createTestClustersConta name, project, reaper, + testClustersRegistryProvider, getFileSystemOperations(), getArchiveOperations(), getExecOperations(), @@ -199,7 +209,9 @@ public void apply(Project project) { Provider testClusterTasksService = project.getGradle() .getSharedServices() - .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> {}); + .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> { + spec.getParameters().getRegistry().set(registryProvider); + }); TestClustersRegistry registry = registryProvider.get(); // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters @@ -209,7 +221,7 @@ public void apply(Project project) { configureClaimClustersHook(project.getGradle(), registry); // Before each task, we determine if a cluster needs to be started for that task. - configureStartClustersHook(project.getGradle(), registry, testClusterTasksService); + configureStartClustersHook(project.getGradle()); // After each task we determine if there are clusters that are no longer needed. 
getEventsListenerRegistry().onTaskCompletion(testClusterTasksService); @@ -228,12 +240,7 @@ private static void configureClaimClustersHook(Gradle gradle, TestClustersRegist }); } - private void configureStartClustersHook( - Gradle gradle, - TestClustersRegistry registry, - Provider testClusterTasksService - ) { - testClusterTasksService.get().registry(registry); + private void configureStartClustersHook(Gradle gradle) { gradle.getTaskGraph().whenReady(taskExecutionGraph -> { taskExecutionGraph.getAllTasks() .stream() @@ -249,19 +256,14 @@ private void configureStartClustersHook( } } - static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { + static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { Map tasksMap = new HashMap<>(); - private TestClustersRegistry registryProvider; public void register(TestClustersAware task) { tasksMap.put(task.getPath(), task); } - public void registry(TestClustersRegistry registry) { - this.registryProvider = registry; - } - @Override public void onFinish(FinishEvent finishEvent) { if (finishEvent instanceof TaskFinishEvent taskFinishEvent) { @@ -273,11 +275,18 @@ public void onFinish(FinishEvent finishEvent) { if (task.getDidWork()) { task.getClusters() .forEach( - cluster -> registryProvider.stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) + cluster -> getParameters().getRegistry() + .get() + .stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) ); } } } } + + // Parameters exposing the shared cluster registry to this service + interface Params extends BuildServiceParameters { + Property getRegistry(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java index 8de0dd67b654c..8d2a9217e7d0c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java @@ -10,6 +10,8 @@ import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; @@ -17,20 +19,23 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; public abstract class TestClustersRegistry implements BuildService { private static final Logger logger = Logging.getLogger(TestClustersRegistry.class); private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); - private final Map claimsInventory = new HashMap<>(); - private final Set runningClusters = new HashSet<>(); + private final Map nodeProcesses = new HashMap<>(); + + @Inject + public abstract ProviderFactory getProviderFactory(); public void claimCluster(ElasticsearchCluster cluster) { - cluster.freeze(); - int claim = claimsInventory.getOrDefault(cluster, 0) + 1; - claimsInventory.put(cluster, claim); - if (claim > 1) { + int claims = cluster.addClaim(); + if (claims > 1) { cluster.setShared(true); } } @@ -43,6 +48,13 @@ public void maybeStartCluster(ElasticsearchCluster cluster) { cluster.start(); } + public Provider 
+    public Provider<TestClusterInfo> getClusterInfo(String clusterName) {
+        return getProviderFactory().of(TestClusterValueSource.class, spec -> {
+            spec.getParameters().getService().set(TestClustersRegistry.this);
+            spec.getParameters().getClusterName().set(clusterName);
+        });
+    }
+
     public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) {
         if (taskFailed) {
             // If the task fails, and other tasks use this cluster, the other task will likely never be
@@ -67,8 +79,7 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) {
                 runningClusters.remove(cluster);
             }
         } else {
-            int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1;
-            claimsInventory.put(cluster, currentClaims);
+            int currentClaims = cluster.removeClaim();
             if (currentClaims <= 0 && runningClusters.contains(cluster)) {
                 cluster.stop(false);
                 runningClusters.remove(cluster);
@@ -76,4 +87,33 @@
         }
     }

+    public TestClusterInfo getClusterDetails(String path, String clusterName) {
+        ElasticsearchCluster cluster = runningClusters.stream()
+            .filter(c -> c.getPath().equals(path))
+            .filter(c -> c.getName().equals(clusterName))
+            .findFirst()
+            .orElseThrow();
+        return new TestClusterInfo(
+            cluster.getAllHttpSocketURI(),
+            cluster.getAllTransportPortURI(),
+            cluster.getNodes().stream().map(n -> n.getAuditLog()).collect(Collectors.toList())
+        );
+    }
+
+    public void restart(String path, String clusterName) {
+        ElasticsearchCluster cluster = runningClusters.stream()
+            .filter(c -> c.getPath().equals(path))
+            .filter(c -> c.getName().equals(clusterName))
+            .findFirst()
+            .orElseThrow();
+        cluster.restart();
+    }
+
+    public void storeProcess(String id, Process esProcess) {
+        nodeProcesses.put(id, esProcess);
+    }
+
+    public Process getProcess(String id) {
+        return nodeProcesses.get(id);
+    }
 }
diff --git a/distribution/build.gradle b/distribution/build.gradle
index e3481706ef230..bfbf10ac85e2f 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -14,7 +14,7 @@ import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.internal.ConcatFilesTask
 import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
 import org.elasticsearch.gradle.internal.NoticeTask
-import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin
+import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin

 import java.nio.file.Files
 import java.nio.file.Path
@@ -33,7 +33,7 @@ configurations {
   }
   featuresMetadata {
     attributes {
-      attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
+      attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
     }
   }
 }
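The `getClusterInfo` provider added to the registry above is built on a Gradle `ValueSource`, so callers get a lazily computed `Provider` instead of touching registry state at configuration time. A stripped-down sketch of the `ValueSource` mechanics, with hypothetical names (`ClusterAddressSource` and a `local` cluster):

```java
import org.gradle.api.provider.Property;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;

// Computes its value on demand, at execution time, from its parameters.
public abstract class ClusterAddressSource implements ValueSource<String, ClusterAddressSource.Params> {

    public interface Params extends ValueSourceParameters {
        Property<String> getClusterName();
    }

    @Override
    public String obtain() {
        // A real implementation would look the cluster up in a registry;
        // this placeholder just derives an address from the name.
        return "http://" + getParameters().getClusterName().get() + ":9200";
    }
}
```

Given a `ProviderFactory providers`, a caller would obtain the lazy value with `providers.of(ClusterAddressSource.class, spec -> spec.getParameters().getClusterName().set("local"))`, mirroring the `getProviderFactory().of(TestClusterValueSource.class, ...)` call above.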
diff --git a/distribution/docker/README.md b/distribution/docker/README.md
index 49facab461edc..9438b4f1e82c3 100644
--- a/distribution/docker/README.md
+++ b/distribution/docker/README.md
@@ -3,8 +3,7 @@
 The ES build can generate several types of Docker image. These are enumerated in
 the [DockerBase] enum.

-  * Default - this is what most people use, and is based on Ubuntu
-  * UBI - the same as the default image, but based upon [RedHat's UBI
+  * Default - this is what most people use, and is based on [RedHat's UBI
     images][ubi], specifically their minimal flavour.
  * Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev)
  * Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins
@@ -23,14 +22,7 @@ the [DockerBase] enum.
    software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is
    another UBI build, this time on the regular UBI image, with extra
    hardening. See below for more details.
-  * Cloud - this is mostly the same as the default image, with some notable differences:
-    * `filebeat` and `metricbeat` are included
-    * `wget` is included
-    * The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is
-      `/app/elasticsearch.sh`. In normal use this file would be bind-mounted
-      in, but the image ships a stub version of this file so that the image
-      can still be tested.

-The long-term goal is for both Cloud images to be retired in favour of the
+The long-term goal is for the Cloud ESS image to be retired in favour of the
 default image.
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index d73f9c395f15c..f5b94fb9dfd94 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -527,9 +527,7 @@ subprojects { Project subProject ->
   final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
   DockerBase base = DockerBase.DEFAULT
-  if (subProject.name.contains('ubi-')) {
-    base = DockerBase.UBI
-  } else if (subProject.name.contains('ironbank-')) {
+  if (subProject.name.contains('ironbank-')) {
     base = DockerBase.IRON_BANK
   } else if (subProject.name.contains('cloud-ess-')) {
     base = DockerBase.CLOUD_ESS
@@ -538,11 +536,11 @@ subprojects { Project subProject ->
   }

   final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
-  final String extension = base == DockerBase.UBI ? 'ubi.tar' :
+  final String extension =
     (base == DockerBase.IRON_BANK ? 'ironbank.tar' :
-      (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' :
-        (base == DockerBase.WOLFI ? 'wolfi.tar' :
-          'docker.tar')))
+      (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' :
+        (base == DockerBase.WOLFI ?
'wolfi.tar' : + 'docker.tar'))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index fd2516f2fdc9a..6cb030565d9d2 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -41,9 +41,7 @@ RUN chmod 0555 /bin/tini <% } else { %> # Install required packages to extract the Elasticsearch distribution -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -RUN <%= retry.loop(package_manager, "${package_manager} update && DEBIAN_FRONTEND=noninteractive ${package_manager} install -y curl ") %> -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %> <% } else { %> RUN <%= retry.loop(package_manager, "${package_manager} install -y findutils tar gzip") %> @@ -117,27 +115,6 @@ RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' bin/elas chmod 0775 bin config config/jvm.options.d data logs plugins && \\ find config -type f -exec chmod 0664 {} + -<% if (docker_base == "cloud") { %> -COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ -RUN set -eux ; \\ - for beat in filebeat metricbeat ; do \\ - if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\ - echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ - echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - mkdir -p /opt/\$beat ; \\ - tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ - done - -# Add plugins infrastructure -RUN mkdir -p /opt/plugins/archive -RUN chmod -R 0555 /opt/plugins -<% } %> - ################################################################################ # Build stage 2 (the actual Elasticsearch image): # @@ -173,21 +150,6 @@ SHELL ["/bin/bash", "-c"] # Optionally set Bash as the default shell in the container at runtime CMD ["/bin/bash"] -<% } else if (docker_base == "default" || docker_base == "cloud") { %> - -# Change default shell to bash, then install required packages with retries. -RUN yes no | dpkg-reconfigure dash && \\ - <%= retry.loop( - package_manager, - "export DEBIAN_FRONTEND=noninteractive && \n" + - " ${package_manager} update && \n" + - " ${package_manager} upgrade -y && \n" + - " ${package_manager} install -y --no-install-recommends \n" + - " ca-certificates curl netcat p11-kit unzip zip ${docker_base == 'cloud' ? 
'wget' : '' } && \n" + - " ${package_manager} clean && \n" + - " rm -rf /var/lib/apt/lists/*" - ) %> - <% } else { %> RUN <%= retry.loop( @@ -201,12 +163,7 @@ RUN <%= retry.loop( <% } %> -<% if (docker_base == "default" || docker_base == "cloud") { %> -RUN groupadd -g 1000 elasticsearch && \\ - adduser --uid 1000 --gid 1000 --home /usr/share/elasticsearch elasticsearch && \\ - adduser elasticsearch root && \\ - chown -R 0:0 /usr/share/elasticsearch -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN groupadd -g 1000 elasticsearch && \ adduser -G elasticsearch -u 1000 elasticsearch -D --home /usr/share/elasticsearch elasticsearch && \ adduser elasticsearch root && \ @@ -226,10 +183,6 @@ COPY --from=builder --chown=0:0 /usr/share/elasticsearch /usr/share/elasticsearc COPY --from=builder --chown=0:0 /bin/tini /bin/tini <% } %> -<% if (docker_base == 'cloud') { %> -COPY --from=builder --chown=0:0 /opt /opt -<% } %> - ENV PATH /usr/share/elasticsearch/bin:\$PATH ENV SHELL /bin/bash COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh @@ -251,12 +204,7 @@ RUN chmod g=u /etc/passwd && \\ chmod 0775 /usr/share/elasticsearch && \\ chown elasticsearch bin config config/jvm.options.d data logs plugins -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -# Update "cacerts" bundle to use Ubuntu's CA certificates (and make sure it -# stays up-to-date with changes to Ubuntu's store) -COPY bin/docker-openjdk /etc/ca-certificates/update.d/docker-openjdk -RUN /etc/ca-certificates/update.d/docker-openjdk -<% } else if (docker_base == 'wolfi') { %> +<% if (docker_base == 'wolfi') { %> RUN ln -sf /etc/ssl/certs/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts <% } else { %> RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts @@ -284,9 +232,7 @@ LABEL org.label-schema.build-date="${build_date}" \\ org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\ org.opencontainers.image.vendor="Elastic" \\ org.opencontainers.image.version="${version}" -<% } %> -<% if (docker_base == 'ubi') { %> LABEL name="Elasticsearch" \\ maintainer="infra@elastic.co" \\ vendor="Elastic" \\ @@ -296,21 +242,12 @@ LABEL name="Elasticsearch" \\ description="You know, for search." <% } %> -<% if (docker_base == 'ubi') { %> -RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE -<% } else if (docker_base == 'iron_bank') { %> RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE +<% if (docker_base == 'iron_bank') { %> COPY LICENSE /licenses/LICENSE.addendum <% } %> -<% if (docker_base == "cloud") { %> -ENTRYPOINT ["/bin/tini", "--"] -CMD ["/app/elasticsearch.sh"] -# Generate a stub command that will be overwritten at runtime -RUN mkdir /app && \\ - echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\ - chmod 0555 /app/elasticsearch.sh -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> # Our actual entrypoint is `tini`, a minimal but functional init program. It # calls the entrypoint we provide, while correctly forwarding signals. 
ENTRYPOINT ["/sbin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] diff --git a/distribution/docker/ubi-docker-aarch64-export/build.gradle b/distribution/docker/ubi-docker-aarch64-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-aarch64-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/docker/ubi-docker-export/build.gradle b/distribution/docker/ubi-docker-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 918980fea616a..5f45b4b72974f 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -10,7 +10,6 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams import org.redline_rpm.header.Flags import java.nio.file.Files @@ -44,7 +43,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.9.1" + id "com.netflix.nebula.ospackage-base" version "11.10.0" } ['deb', 'rpm'].each { type -> @@ -196,7 +195,7 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirPermissions { - unix(02750) + unix(0750) } into('/etc') permissionGroup 'elasticsearch' @@ -209,7 +208,7 @@ def commonPackageConfig(String type, String architecture) { from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') dirPermissions { - unix(02750) + unix(0750) } setgid = true filePermissions { @@ -261,7 +260,7 @@ def commonPackageConfig(String type, String architecture) { // ========= empty dirs ========= // NOTE: these are created under packagingFiles as empty, but the permissions are set here - Closure copyEmptyDir = { path, u, g, mode -> + Closure copyEmptyDir = { path, u, g, gid, mode -> File file = new File(path) into(file.parent) { from "${packagingFiles}/${file.parent}" @@ -273,12 +272,12 @@ def commonPackageConfig(String type, String architecture) { dirPermissions { unix(mode) } - setgid (mode == 02750) + setgid(gid) } } - copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) - copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) - copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755) + copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) + copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) + copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', false, 0755) // the oss package conflicts with the default distribution and vice versa conflicts('elasticsearch-oss') @@ -335,7 +334,6 @@ Closure commonDebConfig(String architecture) { // versions found on oldest supported distro, centos-6 requires('bash', '4.1', GREATER | EQUAL) - requires('lsb-base', '4', GREATER | EQUAL) requires 'libc6' requires 'adduser' diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index edd705b66caaa..1622d8d8aeb40 100644 --- 
a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -5,8 +5,6 @@ changelog-file-missing-in-native-package # we intentionally copy our copyright file for all deb packages -copyright-file-contains-full-apache-2-license -copyright-not-using-common-license-for-apache2 copyright-without-copyright-notice # we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages @@ -16,37 +14,23 @@ arch-dependent-file-in-usr-share missing-dep-on-jarwrapper # we prefer to not make our config and log files world readable -non-standard-file-perm etc/default/elasticsearch 0660 != 0644 -non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 -non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 -non-standard-file-perm etc/elasticsearch/* -non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 -non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 - -# this lintian tag is simply wrong; contrary to the explanation, Debian systemd -# does actually look at /usr/lib/systemd/system -systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service +non-standard-file-perm 0660 != 0644 [etc/default/elasticsearch] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/jvm.options.d/] +non-standard-file-perm 0660 != 0644 [etc/elasticsearch/*] +non-standard-dir-perm 2750 != 0755 [var/lib/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [var/log/elasticsearch/] # the package scripts handle systemd directly and don't need to use deb helpers maintainer-script-calls-systemctl # bundled JDK embedded-library -unstripped-binary-or-object usr/share/elasticsearch/jdk/* -extra-license-file usr/share/elasticsearch/jdk/legal/* -hardening-no-pie usr/share/elasticsearch/jdk/bin/* -hardening-no-pie usr/share/elasticsearch/jdk/lib/* +unstripped-binary-or-object [usr/share/elasticsearch/jdk/*] # the system java version that lintian assumes is far behind what elasticsearch uses unknown-java-class-version -# elastic licensed modules contain elastic license -extra-license-file usr/share/elasticsearch/modules/* - -# This dependency appears to have a packaging flaw, and includes a -# generated source file alongside the compiled version -jar-contains-source usr/share/elasticsearch/modules/repository-gcs/api-common*.jar * - # There's no `License` field in Debian control files, but earlier versions # of `lintian` were more permissive. Override this warning so that we can # run `lintian` on different releases of Debian. The format of this override @@ -58,8 +42,27 @@ unknown-field License # indirectly to libc via libdl. This might not be best practice but we # don't build them ourselves and the license precludes us modifying them # to fix this. -library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so +library-not-linked-against-libc [usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*] + + +# Below is the copy of some of the above rules in format for Lintian versions <= 2.104 (Debian 11) +# Override syntax changes between Lintian versions in a non-backwards compatible way, so we handle it with +# duplication and ignoring some issues in the test code. 
+ + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/elasticsearch 0660 != 0644 +non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 +non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 +non-standard-file-perm etc/elasticsearch/* +non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 +non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 -# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so -# which has no dependencies (not even libc) besides the symbols in the base executable. -shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so +# bundled JDK +unstripped-binary-or-object usr/share/elasticsearch/jdk/* + +# Intel MKL libraries are not linked directly to libc. They are linked +# indirectly to libc via libdl. This might not be best practice but we +# don't build them ourselves and the license precludes us modifying them +# to fix this. +library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so* diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index ac8ade89c9014..57750f2162a71 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -29,7 +29,7 @@ dependencies { implementation 'org.ow2.asm:asm-tree:9.7' api "org.bouncycastle:bcpg-fips:1.0.7.1" - api "org.bouncycastle:bc-fips:1.0.2.4" + api "org.bouncycastle:bc-fips:1.0.2.5" testImplementation project(":test:framework") testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}" diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml new file mode 100644 index 0000000000000..364f012f94420 --- /dev/null +++ b/docs/changelog/112989.yaml @@ -0,0 +1,5 @@ +pr: 112989 +summary: Upgrade Bouncy Castle FIPS dependencies +area: Security +type: upgrade +issues: [] diff --git a/docs/changelog/114914.yaml b/docs/changelog/114914.yaml new file mode 100644 index 0000000000000..bad13e26682dc --- /dev/null +++ b/docs/changelog/114914.yaml @@ -0,0 +1,5 @@ +pr: 114914 +summary: Adding chunking settings to `IbmWatsonxService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml new file mode 100644 index 0000000000000..762bcca5e8c52 --- /dev/null +++ b/docs/changelog/115091.yaml @@ -0,0 +1,7 @@ +pr: 115091 +summary: Added stricter range type checks and runtime warnings for ENRICH +area: ES|QL +type: bug +issues: + - 107357 + - 116799 diff --git a/docs/changelog/115585.yaml b/docs/changelog/115585.yaml new file mode 100644 index 0000000000000..02eecfc3d7d2b --- /dev/null +++ b/docs/changelog/115585.yaml @@ -0,0 +1,6 @@ +pr: 115459 +summary: Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter +area: Search +type: enhancement +issues: + - 97849 diff --git a/docs/changelog/115616.yaml b/docs/changelog/115616.yaml new file mode 100644 index 0000000000000..4fb4dc18538de --- /dev/null +++ b/docs/changelog/115616.yaml @@ -0,0 +1,6 @@ +pr: 115616 +summary: Fix double lookup failure on ESQL +area: ES|QL +type: bug +issues: + - 111398 diff --git a/docs/changelog/115930.yaml b/docs/changelog/115930.yaml new file mode 100644 index 0000000000000..788a01b5cac96 --- /dev/null +++ b/docs/changelog/115930.yaml @@ -0,0 +1,5 @@ +pr: 115930 
+summary: Inconsistency in the _analyzer api when the index is not included +area: Search +type: bug +issues: [] diff --git a/docs/changelog/116115.yaml b/docs/changelog/116115.yaml new file mode 100644 index 0000000000000..33e1735c20ca4 --- /dev/null +++ b/docs/changelog/116115.yaml @@ -0,0 +1,5 @@ +pr: 116115 +summary: Allow http unsafe buffers by default +area: Network +type: enhancement +issues: [] diff --git a/docs/changelog/116277.yaml b/docs/changelog/116277.yaml new file mode 100644 index 0000000000000..62262b7797783 --- /dev/null +++ b/docs/changelog/116277.yaml @@ -0,0 +1,6 @@ +pr: 116277 +summary: Update Semantic Query To Handle Zero Size Responses +area: Vector Search +type: bug +issues: + - 116083 diff --git a/docs/changelog/116339.yaml b/docs/changelog/116339.yaml deleted file mode 100644 index 1767183271812..0000000000000 --- a/docs/changelog/116339.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116339 -summary: "Index stats enhancement: creation date and `tier_preference`" -area: Stats -type: feature -issues: [] diff --git a/docs/changelog/116358.yaml b/docs/changelog/116358.yaml new file mode 100644 index 0000000000000..58b44a1e9bcf5 --- /dev/null +++ b/docs/changelog/116358.yaml @@ -0,0 +1,5 @@ +pr: 116358 +summary: Update Deberta tokenizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/116515.yaml b/docs/changelog/116515.yaml new file mode 100644 index 0000000000000..6c0d473361e52 --- /dev/null +++ b/docs/changelog/116515.yaml @@ -0,0 +1,5 @@ +pr: 116515 +summary: Esql/lookup join grammar +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/116531.yaml b/docs/changelog/116531.yaml new file mode 100644 index 0000000000000..908bbff487973 --- /dev/null +++ b/docs/changelog/116531.yaml @@ -0,0 +1,5 @@ +pr: 116531 +summary: "Add a standard deviation aggregating function: STD_DEV" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/116676.yaml b/docs/changelog/116676.yaml new file mode 100644 index 0000000000000..8c6671e177499 --- /dev/null +++ b/docs/changelog/116676.yaml @@ -0,0 +1,5 @@ +pr: 116676 +summary: Fix handling of time exceeded exception in fetch phase +area: Search +type: bug +issues: [] diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml new file mode 100644 index 0000000000000..0b1d1646868aa --- /dev/null +++ b/docs/changelog/116689.yaml @@ -0,0 +1,10 @@ +pr: 116689 +summary: Deprecate `_source.mode` in mappings +area: Mapping +type: deprecation +issues: [] +deprecation: + title: Deprecate `_source.mode` in mappings + area: Mapping + details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. 
+ impact: Use `index.mapping.source.mode` index setting instead diff --git a/docs/changelog/116692.yaml b/docs/changelog/116692.yaml new file mode 100644 index 0000000000000..30f9e62095436 --- /dev/null +++ b/docs/changelog/116692.yaml @@ -0,0 +1,5 @@ +pr: 116692 +summary: Remove all mentions of eis and gateway and deprecate flags that do +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116765.yaml b/docs/changelog/116765.yaml new file mode 100644 index 0000000000000..ec2357c17acaf --- /dev/null +++ b/docs/changelog/116765.yaml @@ -0,0 +1,5 @@ +pr: 116765 +summary: Metrics for incremental bulk splits +area: Distributed +type: enhancement +issues: [] diff --git a/docs/changelog/116809.yaml b/docs/changelog/116809.yaml new file mode 100644 index 0000000000000..61dbeb233d576 --- /dev/null +++ b/docs/changelog/116809.yaml @@ -0,0 +1,5 @@ +pr: 116809 +summary: "Distinguish `LicensedFeature` by family field" +area: License +type: bug +issues: [] diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml new file mode 100644 index 0000000000000..afe06c583fe55 --- /dev/null +++ b/docs/changelog/116819.yaml @@ -0,0 +1,5 @@ +pr: 116819 +summary: ESQL - Add match operator (:) +area: Search +type: feature +issues: [] diff --git a/docs/changelog/116915.yaml b/docs/changelog/116915.yaml new file mode 100644 index 0000000000000..9686f0023a14a --- /dev/null +++ b/docs/changelog/116915.yaml @@ -0,0 +1,5 @@ +pr: 116915 +summary: Improve message about insecure S3 settings +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/116918.yaml b/docs/changelog/116918.yaml new file mode 100644 index 0000000000000..3b04b4ae4a69a --- /dev/null +++ b/docs/changelog/116918.yaml @@ -0,0 +1,5 @@ +pr: 116918 +summary: Split searchable snapshot into multiple repo operations +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/116922.yaml b/docs/changelog/116922.yaml new file mode 100644 index 0000000000000..39e63da50ea24 --- /dev/null +++ b/docs/changelog/116922.yaml @@ -0,0 +1,5 @@ +pr: 116922 +summary: Always check if index mode is logsdb +area: Logs +type: bug +issues: [] diff --git a/docs/changelog/116931.yaml b/docs/changelog/116931.yaml new file mode 100644 index 0000000000000..8b31d236ff137 --- /dev/null +++ b/docs/changelog/116931.yaml @@ -0,0 +1,5 @@ +pr: 116931 +summary: Enable built-in Inference Endpoints and default for Semantic Text +area: "Machine Learning" +type: enhancement +issues: [] diff --git a/docs/changelog/116942.yaml b/docs/changelog/116942.yaml new file mode 100644 index 0000000000000..5037e8c59cd85 --- /dev/null +++ b/docs/changelog/116942.yaml @@ -0,0 +1,5 @@ +pr: 116942 +summary: Fix handling of bulk requests with semantic text fields and delete ops +area: Relevance +type: bug +issues: [] diff --git a/docs/changelog/116943.yaml b/docs/changelog/116943.yaml new file mode 100644 index 0000000000000..3fd0793610cdd --- /dev/null +++ b/docs/changelog/116943.yaml @@ -0,0 +1,11 @@ +pr: 116943 +summary: Remove support for deprecated `force_source` highlighting parameter +area: Highlighting +type: breaking +issues: [] +breaking: + title: Remove support for deprecated `force_source` highlighting parameter + area: REST API + details: The deprecated highlighting `force_source` parameter is no longer supported. + impact: Users should remove usages of the `force_source` parameter from their search requests. 
+ notable: false diff --git a/docs/changelog/116944.yaml b/docs/changelog/116944.yaml new file mode 100644 index 0000000000000..e7833e49cf965 --- /dev/null +++ b/docs/changelog/116944.yaml @@ -0,0 +1,11 @@ +pr: 116944 +summary: "Remove support for type, fields, `copy_to` and boost in metadata field definition" +area: Mapping +type: breaking +issues: [] +breaking: + title: "Remove support for type, fields, copy_to and boost in metadata field definition" + area: Mapping + details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition + impact: Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings. + notable: false diff --git a/docs/changelog/116957.yaml b/docs/changelog/116957.yaml new file mode 100644 index 0000000000000..1020190de180d --- /dev/null +++ b/docs/changelog/116957.yaml @@ -0,0 +1,5 @@ +pr: 116957 +summary: Propagate scoring function through random sampler +area: Machine Learning +type: bug +issues: [ 110134 ] diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml new file mode 100644 index 0000000000000..8f16b00e3f9fc --- /dev/null +++ b/docs/changelog/116962.yaml @@ -0,0 +1,5 @@ +pr: 116962 +summary: "Add special case for elastic reranker in inference API" +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116970.yaml b/docs/changelog/116970.yaml new file mode 100644 index 0000000000000..66de673dfb53c --- /dev/null +++ b/docs/changelog/116970.yaml @@ -0,0 +1,11 @@ +pr: 116970 +summary: Remove legacy params from range query +area: Search +type: breaking +issues: [] +breaking: + title: Remove legacy params from range query + area: REST API + details: The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + impact: Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. 
+ notable: false diff --git a/docs/changelog/116980.yaml b/docs/changelog/116980.yaml new file mode 100644 index 0000000000000..140324fd40b92 --- /dev/null +++ b/docs/changelog/116980.yaml @@ -0,0 +1,6 @@ +pr: 116980 +summary: "ESQL: Fix sorts containing `_source`" +area: ES|QL +type: bug +issues: + - 116659 diff --git a/docs/changelog/116995.yaml b/docs/changelog/116995.yaml new file mode 100644 index 0000000000000..a0467c630edf3 --- /dev/null +++ b/docs/changelog/116995.yaml @@ -0,0 +1,5 @@ +pr: 116995 +summary: "Apm-data: disable date_detection for all apm data streams" +area: Data streams +type: enhancement +issues: [] \ No newline at end of file diff --git a/docs/changelog/117080.yaml b/docs/changelog/117080.yaml new file mode 100644 index 0000000000000..5909f966e0fa2 --- /dev/null +++ b/docs/changelog/117080.yaml @@ -0,0 +1,5 @@ +pr: 117080 +summary: Esql Enable Date Nanos (tech preview) +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/117095.yaml b/docs/changelog/117095.yaml new file mode 100644 index 0000000000000..27460924ecb71 --- /dev/null +++ b/docs/changelog/117095.yaml @@ -0,0 +1,5 @@ +pr: 117095 +summary: Add version prefix to Inference Service API path +area: Inference +type: enhancement +issues: [] diff --git a/docs/changelog/117105.yaml b/docs/changelog/117105.yaml new file mode 100644 index 0000000000000..de56c4d521a62 --- /dev/null +++ b/docs/changelog/117105.yaml @@ -0,0 +1,6 @@ +pr: 117105 +summary: Fix long metric deserialize & add - auto-resize needs to be set manually +area: CCS +type: bug +issues: + - 116914 diff --git a/docs/changelog/117148.yaml b/docs/changelog/117148.yaml new file mode 100644 index 0000000000000..92dd69672616a --- /dev/null +++ b/docs/changelog/117148.yaml @@ -0,0 +1,5 @@ +pr: 117148 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/docs/changelog/117153.yaml b/docs/changelog/117153.yaml new file mode 100644 index 0000000000000..f7640c0a7ed6a --- /dev/null +++ b/docs/changelog/117153.yaml @@ -0,0 +1,5 @@ +pr: 117153 +summary: "ESQL: fix the column position in errors" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/117182.yaml b/docs/changelog/117182.yaml new file mode 100644 index 0000000000000..b5398bec1ef30 --- /dev/null +++ b/docs/changelog/117182.yaml @@ -0,0 +1,6 @@ +pr: 117182 +summary: Change synthetic source logic for `constant_keyword` +area: Mapping +type: bug +issues: + - 117083 diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml new file mode 100644 index 0000000000000..e89c2d81506d9 --- /dev/null +++ b/docs/changelog/117189.yaml @@ -0,0 +1,5 @@ +pr: 117189 +summary: Fix deberta tokenizer bug caused by bug in normalizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/117201.yaml b/docs/changelog/117201.yaml new file mode 100644 index 0000000000000..f8a2be35c70a3 --- /dev/null +++ b/docs/changelog/117201.yaml @@ -0,0 +1,6 @@ +pr: 117201 +summary: "Use `field_caps` native nested fields filtering" +area: ES|QL +type: bug +issues: + - 117054 diff --git a/docs/changelog/117243.yaml b/docs/changelog/117243.yaml new file mode 100644 index 0000000000000..f871d476bd0ec --- /dev/null +++ b/docs/changelog/117243.yaml @@ -0,0 +1,5 @@ +pr: 117243 +summary: Bump major version for feature migration system indices +area: Infra/Core +type: upgrade +issues: [] diff --git a/docs/changelog/117287.yaml b/docs/changelog/117287.yaml new file mode 100644 index 0000000000000..08da9dd8087b2 --- 
/dev/null +++ b/docs/changelog/117287.yaml @@ -0,0 +1,5 @@ +pr: 117287 +summary: Fixing bug setting index when parsing Google Vertex AI results +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/117294.yaml b/docs/changelog/117294.yaml new file mode 100644 index 0000000000000..f6e80690de7ff --- /dev/null +++ b/docs/changelog/117294.yaml @@ -0,0 +1,5 @@ +pr: 117294 +summary: Always Emit Inference ID in Semantic Text Mapping +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/117297.yaml b/docs/changelog/117297.yaml new file mode 100644 index 0000000000000..4a0051bbae644 --- /dev/null +++ b/docs/changelog/117297.yaml @@ -0,0 +1,5 @@ +pr: 117297 +summary: Fix CCS exchange when multi cluster aliases point to same cluster +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml new file mode 100644 index 0000000000000..69474d68a8190 --- /dev/null +++ b/docs/changelog/117316.yaml @@ -0,0 +1,5 @@ +pr: 117316 +summary: Fix validation of SORT by aggregate functions +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/117350.yaml b/docs/changelog/117350.yaml new file mode 100644 index 0000000000000..dca54f2037a87 --- /dev/null +++ b/docs/changelog/117350.yaml @@ -0,0 +1,5 @@ +pr: 117350 +summary: "Improve halfbyte transposition performance, marginally improving bbq performance" +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/changelog/90529.yaml b/docs/changelog/90529.yaml new file mode 100644 index 0000000000000..a014c82259a9e --- /dev/null +++ b/docs/changelog/90529.yaml @@ -0,0 +1,26 @@ +pr: 90529 +summary: Output a consistent format when generating error json +area: Infra/REST API +type: "breaking" +issues: + - 89387 +breaking: + title: Error JSON structure has changed when detailed errors are disabled + area: REST API + details: |- + This change modifies the JSON format of error messages returned to REST clients + when detailed messages are turned off. + Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set, + just consisted of a single `"error"` text field with some basic information. + Setting `http.detailed_errors.enabled: true` (the default) changed this field + to an object with more detailed information. + With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always + be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed + errors are enabled. + To use the previous structure for non-detailed errors, use the v8 REST API. + impact: |- + If you have set `http.detailed_errors.enabled: false` (the default is `true`) + the structure of JSON when any exceptions occur now matches the structure when + detailed errors are enabled. + To use the previous structure for non-detailed errors, use the v8 REST API. + notable: false diff --git a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc index eed66d81e9132..1bd36f801aa17 100644 --- a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc @@ -111,6 +111,18 @@ output. Defaults to `5`. (Optional, Boolean) If `true`, only include the longest matching subword. Defaults to `false`. 
+`no_sub_matches`::
+(Optional, Boolean)
+If `true`, do not match sub tokens in tokens that are in the word list.
+Defaults to `false`.
+
+`no_overlapping_matches`::
+(Optional, Boolean)
+If `true`, do not allow overlapping tokens.
+Defaults to `false`.
+
+Typically users will only want to enable one of these three flags, since `no_overlapping_matches` is the most restrictive and `no_sub_matches` is more restrictive than `only_longest_match`. When a more restrictive option is enabled, the state of the less restrictive options has no effect.
+
 [[analysis-hyp-decomp-tokenfilter-customize]]
 ==== Customize and add to an analyzer
diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc
index bbbea192f0f86..e640fa77c71ee 100644
--- a/docs/reference/cluster/allocation-explain.asciidoc
+++ b/docs/reference/cluster/allocation-explain.asciidoc
@@ -159,6 +159,8 @@ node.
 <5> The decider which led to the `no` decision for the node.
 <6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision. In this example, a newly created index has <> that requires that it only be allocated to a node named `nonexistent_node`, which does not exist, so the index is unable to allocate.

+See https://www.youtube.com/watch?v=5z3n2VgusLE[this video] for a walkthrough of troubleshooting a node and index setting mismatch.
+
 [[maximum-number-of-retries-exceeded]]
 ====== Maximum number of retries exceeded
@@ -235,7 +237,9 @@ primary shard that was previously allocated.
 ----
 // NOTCONSOLE

-TIP: If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>.
+If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>.
+
+See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`.

 ===== Unassigned replica shard
diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc
index ca3d100e31e06..3d8bdcca07e2b 100644
--- a/docs/reference/cluster/update-settings.asciidoc
+++ b/docs/reference/cluster/update-settings.asciidoc
@@ -59,8 +59,8 @@ An example of a transient update:
 ====
 We no longer recommend using transient cluster settings. Use persistent cluster
 settings instead. If a cluster becomes unstable, transient settings can clear
-unexpectedly, resulting in a potentially undesired cluster configuration. See
-the <>.
+unexpectedly, resulting in a potentially undesired cluster configuration.
+// See the <>.
 ====
 // end::transient-settings-warning[]
diff --git a/docs/reference/connector/docs/sync-rules.asciidoc b/docs/reference/connector/docs/sync-rules.asciidoc
index 9b2a77be7db03..3ab72093666b8 100644
--- a/docs/reference/connector/docs/sync-rules.asciidoc
+++ b/docs/reference/connector/docs/sync-rules.asciidoc
@@ -116,6 +116,12 @@ A "match" is determined based on a condition defined by a combination of "field"

 The `Field` column should be used to define which field on a given document should be considered.

+[NOTE]
+====
+Only top-level fields are supported.
+Nested/object fields cannot be referenced with "dot notation".
+====
+
 The following rules are available in the `Rule` column:

 * `equals` - The field value is equal to the specified value.
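The `no_sub_matches` and `no_overlapping_matches` flags documented in the token filter change above can be tried out directly against the `_analyze` API. Below is a minimal sketch using only the JDK HTTP client; the cluster address, the word list, and the hyphenation patterns path are illustrative assumptions (the patterns file must already exist under the node's config directory):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class AnalyzeNoSubMatches {
    public static void main(String[] args) throws Exception {
        // Inline filter definition. With no_sub_matches=true, sub tokens are
        // not matched inside tokens that are themselves in the word list,
        // so "Kaffeetasse" (itself listed) is not further decomposed.
        String body = """
            {
              "tokenizer": "standard",
              "filter": [
                {
                  "type": "hyphenation_decompounder",
                  "hyphenation_patterns_path": "analysis/de_DR.xml",
                  "word_list": ["Kaffee", "tasse", "Kaffeetasse"],
                  "no_sub_matches": true
                }
              ],
              "text": "Kaffeetasse"
            }""";
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:9200/_analyze"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```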
diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc
index 235113ac1394a..33e748d7eb7c1 100644
--- a/docs/reference/esql/esql-commands.asciidoc
+++ b/docs/reference/esql/esql-commands.asciidoc
@@ -38,12 +38,12 @@ image::images/esql/processing-command.svg[A processing command changing an input
 * <>
 * <>
 ifeval::["{release-state}"=="unreleased"]
-* experimental:[] <>
+//* experimental:[] <>
 endif::[]
 * <>
 * <>
 ifeval::["{release-state}"=="unreleased"]
-* experimental:[] <>
+//* experimental:[] <>
 endif::[]
 * experimental:[] <>
 * <>
@@ -63,12 +63,12 @@ include::processing-commands/enrich.asciidoc[]
 include::processing-commands/eval.asciidoc[]
 include::processing-commands/grok.asciidoc[]
 ifeval::["{release-state}"=="unreleased"]
-include::processing-commands/inlinestats.asciidoc[]
+//include::processing-commands/inlinestats.asciidoc[]
 endif::[]
 include::processing-commands/keep.asciidoc[]
 include::processing-commands/limit.asciidoc[]
 ifeval::["{release-state}"=="unreleased"]
-include::processing-commands/lookup.asciidoc[]
+//include::processing-commands/lookup.asciidoc[]
 endif::[]
 include::processing-commands/mv_expand.asciidoc[]
 include::processing-commands/rename.asciidoc[]
diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc
index c48118d1c367a..ad34e29f1a55b 100644
--- a/docs/reference/esql/esql-enrich-data.asciidoc
+++ b/docs/reference/esql/esql-enrich-data.asciidoc
@@ -138,8 +138,33 @@ include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=upda

 include::../ingest/enrich.asciidoc[tag=update-enrich-policy]

-==== Limitations
+==== Enrich Policy Types and Limitations

+The {esql} `ENRICH` command supports all three enrich policy types:
+
+`geo_match`::
+Matches enrich data to incoming documents based on a <>.
+For an example, see <>.
+
+`match`::
+Matches enrich data to incoming documents based on a <>.
+For an example, see <>.
+
+`range`::
+Matches a number, date, or IP address in incoming documents to a range in the
+enrich index based on a <>. For an example,
+see <>.
+
 // tag::limitations[]
-The {esql} `ENRICH` command only supports enrich policies of type `match`.
-Furthermore, `ENRICH` only supports enriching on a column of type `keyword`.
+While all three enrich policy types are supported, there are some limitations to be aware of:
+
+* The `geo_match` enrich policy type only supports the `intersects` spatial relation.
+* The `match_field` in the `ENRICH` command must be of the correct type.
+For example, if the enrich policy is of type `geo_match`, the `match_field` in the `ENRICH`
+command must be of type `geo_point` or `geo_shape`.
+Likewise, a `range` enrich policy requires a `match_field` of type `integer`, `long`, `date`, or `ip`,
+depending on the type of the range field in the original enrich index.
+* However, this constraint is relaxed for `range` policies when the `match_field` is of type `KEYWORD`.
+In this case the field values are parsed during query execution, row by row.
+If any value fails to parse, the output values for that row are set to `null`,
+a warning is produced, and the query continues to execute.
// end::limitations[]
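The row-by-row parsing described above for `range` policies with a `KEYWORD` match field is easiest to see through the ES|QL `_query` endpoint. A hedged sketch, assuming a local cluster, an `events` index with a textual `client_ip` column, and an already-executed `range` enrich policy named `ip_ranges` that contributes a `network_name` field (all of these names are illustrative):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class EnrichRangeQuery {
    public static void main(String[] args) throws Exception {
        // client_ip is a keyword column: each value is parsed per row. Rows
        // that fail to parse yield null enrich columns, and the response
        // carries a Warning header rather than failing the whole query.
        String body = """
            {
              "query": "FROM events | ENRICH ip_ranges ON client_ip | KEEP client_ip, network_name | LIMIT 10"
            }""";
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:9200/_query"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```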
diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc
index a7c0e5e01a867..151ca803bf2eb 100644
--- a/docs/reference/esql/esql-language.asciidoc
+++ b/docs/reference/esql/esql-language.asciidoc
@@ -14,6 +14,7 @@ Detailed reference documentation for the {esql} language:
 * <>
 * <>
 * <>
+* <>

 include::esql-syntax.asciidoc[]
 include::esql-commands.asciidoc[]
@@ -23,3 +24,4 @@ include::multivalued-fields.asciidoc[]
 include::esql-process-data-with-dissect-grok.asciidoc[]
 include::esql-enrich-data.asciidoc[]
 include::implicit-casting.asciidoc[]
+include::time-spans.asciidoc[]
diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc
index 1772e956bd9e2..c2849e4889f98 100644
--- a/docs/reference/esql/esql-limitations.asciidoc
+++ b/docs/reference/esql/esql-limitations.asciidoc
@@ -25,6 +25,9 @@ include::processing-commands/limit.asciidoc[tag=limitation]
 * `alias`
 * `boolean`
 * `date`
+* `date_nanos` (Tech Preview)
+** The following functions don't yet support date nanos: `bucket`, `date_format`, `date_parse`, `date_diff`, `date_extract`
+** You can use `to_datetime` to cast to millisecond dates in order to use unsupported functions
 * `double` (`float`, `half_float`, `scaled_float` are represented as `double`)
 * `ip`
 * `keyword` family including `keyword`, `constant_keyword`, and `wildcard`
@@ -50,7 +53,6 @@ include::processing-commands/limit.asciidoc[tag=limitation]
 ** `position`
 ** `aggregate_metric_double`
 * Date/time
-** `date_nanos`
 ** `date_range`
 * Other types
 ** `binary`
diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc
index 63b8738266132..8e07a627567df 100644
--- a/docs/reference/esql/esql-query-api.asciidoc
+++ b/docs/reference/esql/esql-query-api.asciidoc
@@ -92,8 +92,8 @@ https://en.wikipedia.org/wiki/Query_plan[EXPLAIN PLAN].

 ifeval::["{release-state}"=="unreleased"]
-`table`::
-(Optional, object) Named "table" parameters that can be referenced by the <> command.
+//`table`::
+//(Optional, object) Named "table" parameters that can be referenced by the <> command.
 endif::[]

 [discrete]
diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc
index c7f741d064310..ba1c4ca820381 100644
--- a/docs/reference/esql/esql-syntax.asciidoc
+++ b/docs/reference/esql/esql-syntax.asciidoc
@@ -157,21 +157,15 @@ FROM employees
 ==== Timespan literals

 Datetime intervals and timespans can be expressed using timespan literals.
-Timespan literals are a combination of a number and a qualifier. These
-qualifiers are supported:
-
-* `millisecond`/`milliseconds`/`ms`
-* `second`/`seconds`/`sec`/`s`
-* `minute`/`minutes`/`min`
-* `hour`/`hours`/`h`
-* `day`/`days`/`d`
-* `week`/`weeks`/`w`
-* `month`/`months`/`mo`
-* `quarter`/`quarters`/`q`
-* `year`/`years`/`yr`/`y`
+Timespan literals are a combination of a number and a temporal unit. The
+supported temporal units are listed in <>.
+More examples of the usage of time spans can be found in
+<>.
+
 Timespan literals are not whitespace sensitive.
 These expressions are all valid:

 * `1day`
 * `1 day`
 * `1        day`
+
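A quick demonstration of the timespan literals described above, again through the `_query` endpoint. This sketch needs no index at all, since `ROW` fabricates its input; the cluster address is the only assumption:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class TimespanLiteralQuery {
    public static void main(String[] args) throws Exception {
        // "1 day" and "2 weeks" are timespan literals: a number plus a
        // temporal unit, with whitespace between them being insignificant.
        String body = """
            {
              "query": "ROW t = NOW() | EVAL yesterday = t - 1 day, in_two_weeks = t + 2 weeks"
            }""";
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:9200/_query"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```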
diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc
index 7cdc42ea6cbf9..c2c2508ad5de2 100644
--- a/docs/reference/esql/functions/aggregation-functions.asciidoc
+++ b/docs/reference/esql/functions/aggregation-functions.asciidoc
@@ -17,10 +17,11 @@ The <> command supports these aggregate functions:
 * <>
 * <>
 * experimental:[] <>
+* <>
 * <>
 * <>
 * <>
-* experimental:[] <>
+* <>
 // end::agg_list[]

 include::layout/avg.asciidoc[]
@@ -32,6 +33,7 @@ include::layout/median_absolute_deviation.asciidoc[]
 include::layout/min.asciidoc[]
 include::layout/percentile.asciidoc[]
 include::layout/st_centroid_agg.asciidoc[]
+include::layout/std_dev.asciidoc[]
 include::layout/sum.asciidoc[]
 include::layout/top.asciidoc[]
 include::layout/values.asciidoc[]
diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc
index 72d466ae83d11..59bdadecc4923 100644
--- a/docs/reference/esql/functions/binary.asciidoc
+++ b/docs/reference/esql/functions/binary.asciidoc
@@ -87,6 +87,7 @@ Supported types:

 include::types/greater_than_or_equal.asciidoc[]

+[[esql-add]]
 ==== Add `+`
 [.text-center]
 image::esql/functions/signature/add.svg[Embedded,opts=inline]
@@ -98,6 +99,7 @@ Supported types:

 include::types/add.asciidoc[]

+[[esql-subtract]]
 ==== Subtract `-`
 [.text-center]
 image::esql/functions/signature/sub.svg[Embedded,opts=inline]
diff --git a/docs/reference/esql/functions/description/std_dev.asciidoc b/docs/reference/esql/functions/description/std_dev.asciidoc
new file mode 100644
index 0000000000000..b78ddd7dbba13
--- /dev/null
+++ b/docs/reference/esql/functions/description/std_dev.asciidoc
@@ -0,0 +1,5 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Description*
+
+The standard deviation of a numeric field.
diff --git a/docs/reference/esql/functions/examples/count.asciidoc b/docs/reference/esql/functions/examples/count.asciidoc
index fb696b51e054c..33ed054d3d1e2 100644
--- a/docs/reference/esql/functions/examples/count.asciidoc
+++ b/docs/reference/esql/functions/examples/count.asciidoc
@@ -37,7 +37,7 @@ include::{esql-specs}/stats.csv-spec[tag=count-where]
 |===
 include::{esql-specs}/stats.csv-spec[tag=count-where-result]
 |===
-To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)`
+To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)`. This builds on the three-valued logic ({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0.
 [source.merge.styled,esql]
 ----
 include::{esql-specs}/stats.csv-spec[tag=count-or-null]
diff --git a/docs/reference/esql/functions/examples/std_dev.asciidoc b/docs/reference/esql/functions/examples/std_dev.asciidoc
new file mode 100644
index 0000000000000..2e6dc996aae9a
--- /dev/null
+++ b/docs/reference/esql/functions/examples/std_dev.asciidoc
@@ -0,0 +1,22 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=stdev] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=stdev-result] +|=== +The expression can use inline functions. For example, to calculate the standard deviation of each employee's maximum salary changes, first use `MV_MAX` on each row, and then use `STD_DEV` on the result +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index bf498f690551c..51693d9d30660 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -172,6 +172,48 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "date_nanos", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "date_nanos", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + }, + { + "name" : "elseValue", + "type" : "date_nanos", + "optional" : true, + "description" : "The value that's returned when no condition evaluates to `true`." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 7f49195190951..c929323397c9b 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -88,6 +88,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "Expression to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "Other expression to evaluate." 
+ } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/count.json b/docs/reference/esql/functions/kibana/definition/count.json index 88d4ba3d3e339..329a18c4d9d01 100644 --- a/docs/reference/esql/functions/kibana/definition/count.json +++ b/docs/reference/esql/functions/kibana/definition/count.json @@ -151,7 +151,7 @@ ], "examples" : [ "FROM employees\n| STATS COUNT(height)", - "FROM employees \n| STATS count = COUNT(*) BY languages \n| SORT languages DESC", + "FROM employees\n| STATS count = COUNT(*) BY languages\n| SORT languages DESC", "ROW words=\"foo;bar;baz;qux;quux;foo\"\n| STATS word_count = COUNT(SPLIT(words, \";\"))", "ROW n=1\n| WHERE n < 0\n| STATS COUNT(n)", "ROW n=1\n| STATS COUNT(n > 0 OR NULL), COUNT(n < 0 OR NULL)" diff --git a/docs/reference/esql/functions/kibana/definition/count_distinct.json b/docs/reference/esql/functions/kibana/definition/count_distinct.json index 3addd08df60df..54b99ee84ce2d 100644 --- a/docs/reference/esql/functions/kibana/definition/count_distinct.json +++ b/docs/reference/esql/functions/kibana/definition/count_distinct.json @@ -136,6 +136,72 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json index 871994407233b..cdda984a0ce7e 100644 --- a/docs/reference/esql/functions/kibana/definition/date_trunc.json +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -22,6 +22,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -39,6 +57,24 @@ ], "variadic" : false, "returnType" : "date" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/equals.json b/docs/reference/esql/functions/kibana/definition/equals.json index 59df59eaccc4e..885d949f4b20f 100644 --- a/docs/reference/esql/functions/kibana/definition/equals.json +++ b/docs/reference/esql/functions/kibana/definition/equals.json @@ -77,6 +77,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than.json b/docs/reference/esql/functions/kibana/definition/greater_than.json index 7354112551e2c..cf6e30a0a4547 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json index 832eed417ef4a..2535c68af6acf 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index eebb4fad1eb1d..077100317dfca 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -53,6 +53,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 02fa58f92eaef..18ec65c60f475 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -52,6 +52,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than.json b/docs/reference/esql/functions/kibana/definition/less_than.json index 66578d73b8e9c..a73754d200d46 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than.json +++ b/docs/reference/esql/functions/kibana/definition/less_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json index 5ffd4567cdb07..7af477db32a34 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index 8a355360a790f..4a5b05a3f501b 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -78,7 +78,7 @@ } ], "examples" : [ - "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json new file mode 100644 index 0000000000000..7a0ace6168b59 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -0,0 +1,85 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "name" : "match_operator", + "description" : "Performs a match query on the specified field. Returns true if the provided query matches the row.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + ], + "preview" : true, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/max.json b/docs/reference/esql/functions/kibana/definition/max.json index 45fd26571b091..7f3d2215ee099 100644 --- a/docs/reference/esql/functions/kibana/definition/max.json +++ b/docs/reference/esql/functions/kibana/definition/max.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/min.json b/docs/reference/esql/functions/kibana/definition/min.json index ae71fba049dbe..74e3fd8208f1b 100644 --- a/docs/reference/esql/functions/kibana/definition/min.json +++ b/docs/reference/esql/functions/kibana/definition/min.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json index 4767b35ec7cac..90ace2525f710 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_count.json +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "integer" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "integer" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json index bfca58bc3e140..ce2c96dbc1757 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -53,6 +53,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json index a2b6358023e4b..552f568c9b171 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_first.json +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json index b6dc268af5305..78d7b348a6042 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_last.json +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json index 27d2b010dc02c..a1e55c58cff70 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_max.json +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json index 410e97335687f..7998ca4eda94e 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_min.json +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json index dbbfe0ffb5a78..df4d48145fac6 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_slice.json +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -100,6 +100,30 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression. If `null`, the function returns `null`." + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "Start position. If `null`, the function returns `null`. The start argument can be negative. An index of -1 is used to specify the last value in the list." + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "End position(included). Optional; if omitted, the position at `start` is returned. The end argument can be negative. An index of -1 is used to specify the last value in the list." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json index 4cb255fb0afcb..072c05743af33 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_sort.json +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -40,6 +40,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression. 
If `null`, the function returns `null`." + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "Sort order. The valid options are ASC and DESC, the default is ASC." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/not_equals.json b/docs/reference/esql/functions/kibana/definition/not_equals.json index 69389d4c8d077..24f31115cbc37 100644 --- a/docs/reference/esql/functions/kibana/definition/not_equals.json +++ b/docs/reference/esql/functions/kibana/definition/not_equals.json @@ -77,6 +77,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json index 9823c3cff8923..76473349a3414 100644 --- a/docs/reference/esql/functions/kibana/definition/qstr.json +++ b/docs/reference/esql/functions/kibana/definition/qstr.json @@ -30,7 +30,7 @@ } ], "examples" : [ - "from books \n| where qstr(\"author: Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" + "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/std_dev.json b/docs/reference/esql/functions/kibana/definition/std_dev.json new file mode 100644 index 0000000000000..f31d3345421d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/std_dev.json @@ -0,0 +1,50 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "std_dev", + "description" : "The standard deviation of a numeric field.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "FROM employees\n| STATS STD_DEV(height)", + "FROM employees\n| STATS stddev_salary_change = STD_DEV(MV_MAX(salary_change))" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index 07ffe84444f02..d9409bceb8e6f 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -4,7 +4,92 @@ "name" : "to_date_nanos", "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. 
Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", - "signatures" : [ ], + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "date", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + } + ], "preview" : true, "snapshot_only" : false } diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json index 072aa66aac669..8f9ecbd139d32 100644 --- a/docs/reference/esql/functions/kibana/definition/to_datetime.json +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -17,6 +17,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json index afd6de001bbc6..eb1ce7220c3f9 100644 --- a/docs/reference/esql/functions/kibana/definition/to_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json index 33e95d5bed1c2..1c86e81b31136 100644 --- a/docs/reference/esql/functions/kibana/definition/to_string.json +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json index ae69febd4f755..95ac402bb242a 100644 --- a/docs/reference/esql/functions/kibana/definition/values.json +++ b/docs/reference/esql/functions/kibana/definition/values.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md index 3c06662982bbf..b866637b41b85 100644 --- a/docs/reference/esql/functions/kibana/docs/match.md +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Performs a match query on the specified field. Returns true if the provided query matches the row. ``` -from books -| where match(author, "Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; ``` diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md new file mode 100644 index 0000000000000..fda8b24ff76cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -0,0 +1,14 @@ + + +### MATCH_OPERATOR +Performs a match query on the specified field. Returns true if the provided query matches the row. + +``` +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; +``` diff --git a/docs/reference/esql/functions/kibana/docs/qstr.md b/docs/reference/esql/functions/kibana/docs/qstr.md index 37b5777623185..9b5dc3f9a22eb 100644 --- a/docs/reference/esql/functions/kibana/docs/qstr.md +++ b/docs/reference/esql/functions/kibana/docs/qstr.md @@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Performs a query string query. Returns true if the provided query string matches the row. ``` -from books -| where qstr("author: Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE QSTR("author: Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; ``` diff --git a/docs/reference/esql/functions/kibana/docs/std_dev.md b/docs/reference/esql/functions/kibana/docs/std_dev.md new file mode 100644 index 0000000000000..a6afca7b8f6b3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/std_dev.md @@ -0,0 +1,11 @@ + + +### STD_DEV +The standard deviation of a numeric field. 
+ +``` +FROM employees +| STATS STD_DEV(height) +``` diff --git a/docs/reference/esql/functions/layout/std_dev.asciidoc b/docs/reference/esql/functions/layout/std_dev.asciidoc new file mode 100644 index 0000000000000..a7a34b1331d17 --- /dev/null +++ b/docs/reference/esql/functions/layout/std_dev.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-std_dev]] +=== `STD_DEV` + +*Syntax* + +[.text-center] +image::esql/functions/signature/std_dev.svg[Embedded,opts=inline] + +include::../parameters/std_dev.asciidoc[] +include::../description/std_dev.asciidoc[] +include::../types/std_dev.asciidoc[] +include::../examples/std_dev.asciidoc[] diff --git a/docs/reference/esql/functions/mv-functions.asciidoc b/docs/reference/esql/functions/mv-functions.asciidoc index 4093e44c16911..3da0249c9c0db 100644 --- a/docs/reference/esql/functions/mv-functions.asciidoc +++ b/docs/reference/esql/functions/mv-functions.asciidoc @@ -19,6 +19,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -37,6 +38,7 @@ include::layout/mv_max.asciidoc[] include::layout/mv_median.asciidoc[] include::layout/mv_median_absolute_deviation.asciidoc[] include::layout/mv_min.asciidoc[] +include::layout/mv_percentile.asciidoc[] include::layout/mv_pseries_weighted_sum.asciidoc[] include::layout/mv_slice.asciidoc[] include::layout/mv_sort.asciidoc[] diff --git a/docs/reference/esql/functions/operators.asciidoc b/docs/reference/esql/functions/operators.asciidoc index ee344a52687c2..a1a2226335e9b 100644 --- a/docs/reference/esql/functions/operators.asciidoc +++ b/docs/reference/esql/functions/operators.asciidoc @@ -16,6 +16,7 @@ Boolean operators for comparing against one or multiple expressions. * <> * <> * <> +* experimental:[] <> // end::op_list[] include::binary.asciidoc[] @@ -26,3 +27,4 @@ include::cast.asciidoc[] include::in.asciidoc[] include::like.asciidoc[] include::rlike.asciidoc[] +include::search.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/std_dev.asciidoc b/docs/reference/esql/functions/parameters/std_dev.asciidoc new file mode 100644 index 0000000000000..91c56709d182a --- /dev/null +++ b/docs/reference/esql/functions/parameters/std_dev.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/search.asciidoc b/docs/reference/esql/functions/search.asciidoc new file mode 100644 index 0000000000000..ae1b003b65abb --- /dev/null +++ b/docs/reference/esql/functions/search.asciidoc @@ -0,0 +1,23 @@ +[discrete] +[[esql-search-operators]] +=== Search operators + +The only search operator is match (`:`). + +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +The match operator performs a <> on the specified field. Returns true if the provided query matches the row. 
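As a minimal sketch of the `:` syntax described above (assuming the same `books` test data set that the `MATCH` function examples use; the rendered page draws its canonical example from `match-operator.csv-spec` below):

[source,esql]
----
FROM books
// ':' takes the field on the left and the query text on the right
| WHERE author:"Faulkner"
| KEEP book_no, author
| SORT book_no
| LIMIT 5
----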
+ +[.text-center] +image::esql/functions/signature/match_operator.svg[Embedded,opts=inline] + +include::types/match.asciidoc[] + +[source.merge.styled,esql] +---- +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field-result] +|=== diff --git a/docs/reference/esql/functions/signature/match_operator.svg b/docs/reference/esql/functions/signature/match_operator.svg new file mode 100644 index 0000000000000..70cea841622eb --- /dev/null +++ b/docs/reference/esql/functions/signature/match_operator.svg @@ -0,0 +1 @@ +field:query \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/std_dev.svg b/docs/reference/esql/functions/signature/std_dev.svg new file mode 100644 index 0000000000000..606d285154f59 --- /dev/null +++ b/docs/reference/esql/functions/signature/std_dev.svg @@ -0,0 +1 @@ +STD_DEV(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index c6fb6a091e9d0..9e6915a37fc14 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -13,6 +13,8 @@ boolean | cartesian_shape | cartesian_shape | cartesian_shape boolean | cartesian_shape | | cartesian_shape boolean | date | date | date boolean | date | | date +boolean | date_nanos | date_nanos | date_nanos +boolean | date_nanos | | date_nanos boolean | double | double | double boolean | double | | double boolean | geo_point | geo_point | geo_point diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 23a249494e0a2..b6479dc7ff86a 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -10,6 +10,7 @@ boolean | | boolean cartesian_point | cartesian_point | cartesian_point cartesian_shape | cartesian_shape | cartesian_shape date | date | date +date_nanos | date_nanos | date_nanos geo_point | geo_point | geo_point geo_shape | geo_shape | geo_shape integer | integer | integer diff --git a/docs/reference/esql/functions/types/count_distinct.asciidoc b/docs/reference/esql/functions/types/count_distinct.asciidoc index c365c8814573c..f5758a8914d20 100644 --- a/docs/reference/esql/functions/types/count_distinct.asciidoc +++ b/docs/reference/esql/functions/types/count_distinct.asciidoc @@ -13,6 +13,10 @@ date | integer | long date | long | long date | unsigned_long | long date | | long +date_nanos | integer | long +date_nanos | long | long +date_nanos | unsigned_long | long +date_nanos | | long double | integer | long double | long | long double | unsigned_long | long diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc index aa7dee99c6c44..c610f9119e51c 100644 --- a/docs/reference/esql/functions/types/date_trunc.asciidoc +++ b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -6,5 +6,7 @@ |=== interval | date | result date_period | date | date +date_period | date_nanos | date_nanos time_duration | date | date +time_duration | date_nanos | date_nanos |=== diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index ad0e46ef4b8da..8d48b7ebf084a 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ 
b/docs/reference/esql/functions/types/equals.asciidoc @@ -9,6 +9,7 @@ boolean | boolean | boolean cartesian_point | cartesian_point | boolean cartesian_shape | cartesian_shape | boolean date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 7df77a6991315..0bc11b569d426 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -8,6 +8,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean date | date | date +date_nanos | date_nanos | date_nanos double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 7df77a6991315..0bc11b569d426 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -8,6 +8,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean date | date | date +date_nanos | date_nanos | date_nanos double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc new file mode 100644 index 0000000000000..7523b29c62b1d --- /dev/null +++ b/docs/reference/esql/functions/types/match_operator.asciidoc @@ -0,0 +1,12 @@ +// This 
is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | query | result +keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean +text | text | boolean +|=== diff --git a/docs/reference/esql/functions/types/max.asciidoc b/docs/reference/esql/functions/types/max.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/max.asciidoc +++ b/docs/reference/esql/functions/types/max.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/min.asciidoc b/docs/reference/esql/functions/types/min.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/min.asciidoc +++ b/docs/reference/esql/functions/types/min.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 260c531731f04..c58c4eda44396 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -9,6 +9,7 @@ boolean | integer cartesian_point | integer cartesian_shape | integer date | integer +date_nanos | integer double | integer geo_point | integer geo_shape | integer diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 976de79bb0910..1524ec86cd5ec 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 47736e76d1db4..e68af2f992b43 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 47736e76d1db4..e68af2f992b43 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index d4e014554c86c..ffba14489b97c 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 
d4e014554c86c..ffba14489b97c 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 60c1f6315a599..75f45e333ee0c 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -9,6 +9,7 @@ boolean | integer | integer | boolean cartesian_point | integer | integer | cartesian_point cartesian_shape | integer | integer | cartesian_shape date | integer | integer | date +date_nanos | integer | integer | date_nanos double | integer | integer | double geo_point | integer | integer | geo_point geo_shape | integer | integer | geo_shape diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index c21ea5983945e..83d3e45c7be02 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -7,6 +7,7 @@ field | order | result boolean | keyword | boolean date | keyword | date +date_nanos | keyword | date_nanos double | keyword | double integer | keyword | integer ip | keyword | ip diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index ad0e46ef4b8da..8d48b7ebf084a 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -9,6 +9,7 @@ boolean | boolean | boolean cartesian_point | cartesian_point | boolean cartesian_shape | cartesian_shape | boolean date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/std_dev.asciidoc b/docs/reference/esql/functions/types/std_dev.asciidoc new file mode 100644 index 0000000000000..273dae4af76c2 --- /dev/null +++ b/docs/reference/esql/functions/types/std_dev.asciidoc @@ -0,0 +1,11 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +|=== diff --git a/docs/reference/esql/functions/types/to_date_nanos.asciidoc b/docs/reference/esql/functions/types/to_date_nanos.asciidoc index 1f50b65f25a77..dec6833d14b08 100644 --- a/docs/reference/esql/functions/types/to_date_nanos.asciidoc +++ b/docs/reference/esql/functions/types/to_date_nanos.asciidoc @@ -5,5 +5,11 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== field | result -date_nanos +date | date_nanos +date_nanos | date_nanos +double | date_nanos +keyword | date_nanos +long | date_nanos +text | date_nanos +unsigned_long | date_nanos |=== diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index 80c986efca794..118ed6c09c11c 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -6,6 +6,7 @@ |=== field | result date | date +date_nanos | date double | date integer | date keyword | date diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index a07990cb1cfbf..1009543c1bbde 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -9,6 +9,7 @@ boolean | long counter_integer | long counter_long | long date | long +date_nanos | long double | long integer | long keyword | long diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 26a5b31a2a589..9d4188214b3d8 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -9,6 +9,7 @@ boolean | keyword cartesian_point | keyword cartesian_shape | keyword date | keyword +date_nanos | keyword double | keyword geo_point | keyword geo_shape | keyword diff --git a/docs/reference/esql/functions/types/values.asciidoc b/docs/reference/esql/functions/types/values.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/values.asciidoc +++ b/docs/reference/esql/functions/types/values.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/implicit-casting.asciidoc b/docs/reference/esql/implicit-casting.asciidoc index ffb6d3fc35acb..b24be0b645472 100644 --- a/docs/reference/esql/implicit-casting.asciidoc +++ b/docs/reference/esql/implicit-casting.asciidoc @@ -5,7 +5,7 @@ Implicit casting ++++ -Often users will input `date`, `ip`, `version`, `date_period` or `time_duration` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. +Often users will input `date`, `date_period`, `time_duration`, `ip` or `version` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. Without implicit casting users must explicitly code these `to_X` functions in their queries, when string literals don't match the target data types they are assigned or compared to. Here is an example of using `to_datetime` to explicitly perform a data type conversion. 
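The example body itself sits in unchanged context that this diff does not show; a plausible sketch of such an explicit conversion, with an illustrative field and date literal rather than the elided original, might look like:

[source,esql]
----
FROM employees
// TO_DATETIME explicitly parses the string literal into a datetime value
| WHERE hire_date > TO_DATETIME("1985-01-01T00:00:00Z")
| LIMIT 1
----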
@@ -18,7 +18,10 @@ FROM employees | LIMIT 1 ---- -Implicit casting improves usability, by automatically converting string literals to the target data type. This is most useful when the target data type is `date`, `ip`, `version`, `date_period` or `time_duration`. It is natural to specify these as a string in queries. +[discrete] +[[esql-implicit-casting-example]] +==== Implicit casting example +Implicit casting automatically converts string literals to the target data type. This allows users to specify string values for types like `date`, `date_period`, `time_duration`, `ip` and `version` in their queries. The first query can be coded without calling the `to_datetime` function, as follows: @@ -31,35 +34,36 @@ FROM employees | LIMIT 1 ---- -[float] -=== Implicit casting support +[discrete] +[[esql-implicit-casting-supported-operations]] +==== Operations that support implicit casting The following table details which {esql} operations support implicit casting for different data types. [%header.monospaced.styled,format=dsv,separator=|] |=== -||ScalarFunction*|Operator*|<>|<> -|DATE|Y|Y|Y|N -|IP|Y|Y|Y|N -|VERSION|Y|Y|Y|N -|BOOLEAN|Y|Y|Y|N -|DATE_PERIOD/TIME_DURATION|Y|N|Y|N +|ScalarFunctions|Operators|<>|<> +DATE|Y|Y|Y|N +DATE_PERIOD/TIME_DURATION|Y|N|Y|N +IP|Y|Y|Y|N +VERSION|Y|Y|Y|N +BOOLEAN|Y|Y|Y|N |=== -ScalarFunction* includes: +ScalarFunctions includes: -<> +* <> -<> +* <> -<> +* <> -Operator* includes: +Operators includes: -<> +* <> -<> +* <> -<> +* <> diff --git a/docs/reference/esql/processing-commands/inlinestats.asciidoc b/docs/reference/esql/processing-commands/inlinestats.disabled similarity index 100% rename from docs/reference/esql/processing-commands/inlinestats.asciidoc rename to docs/reference/esql/processing-commands/inlinestats.disabled diff --git a/docs/reference/esql/processing-commands/lookup.asciidoc b/docs/reference/esql/processing-commands/lookup.disabled similarity index 100% rename from docs/reference/esql/processing-commands/lookup.asciidoc rename to docs/reference/esql/processing-commands/lookup.disabled diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index 0c479c1f62b76..3ed296fb6db24 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -1,16 +1,18 @@ [discrete] [[esql-stats-by]] -=== `STATS ... BY` +=== `STATS` -The `STATS ... BY` processing command groups rows according to a common value +The `STATS` processing command groups rows according to a common value and calculates one or more aggregated values over the grouped rows. **Syntax** [source,esql] ---- -STATS [column1 =] expression1[, ..., [columnN =] expressionN] -[BY grouping_expression1[, ..., grouping_expressionN]] +STATS [column1 =] expression1 [WHERE boolean_expression1][, + ..., + [columnN =] expressionN [WHERE boolean_expressionN]] + [BY grouping_expression1[, ..., grouping_expressionN]] ---- *Parameters* @@ -28,14 +30,18 @@ An expression that computes an aggregated value. An expression that outputs the values to group by. If its name coincides with one of the computed columns, that column will be ignored. +`boolean_expressionX`:: +The condition that must be met for a row to be included in the evaluation of `expressionX`. + NOTE: Individual `null` values are skipped when computing aggregations. *Description* -The `STATS ... 
BY` processing command groups rows according to a common value -and calculate one or more aggregated values over the grouped rows. If `BY` is -omitted, the output table contains exactly one row with the aggregations applied -over the entire dataset. +The `STATS` processing command groups rows according to a common value +and calculates one or more aggregated values over the grouped rows. For the +calculation of each aggregated value, the rows in a group can be filtered with +`WHERE`. If `BY` is omitted, the output table contains exactly one row with +the aggregations applied over the entire dataset. The following <> are supported: @@ -90,6 +96,29 @@ include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result] |=== +To filter the rows that go into an aggregation, use the `WHERE` clause: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFiltering] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFiltering-result] +|=== + +Aggregations with and without a filter can be mixed, and grouping is +optional as well: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup-result] +|=== + [[esql-stats-mv-group]] If the grouping key is multivalued then the input row is in all groups: @@ -109,7 +138,7 @@ It's also possible to group by multiple values: include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues] ---- -If the all grouping keys are multivalued then the input row is in all groups: +If all the grouping keys are multivalued then the input row is in all groups: [source.merge.styled,esql] ---- @@ -121,7 +150,7 @@ include::{esql-specs}/stats.csv-spec[tag=multi-mv-group-result] |=== Both the aggregating functions and the grouping expressions accept other -functions. This is useful for using `STATS...BY` on multivalue columns. +functions. This is useful for using `STATS` on multivalue columns. For example, to calculate the average salary change, you can use `MV_AVG` to first average the multiple values per employee, and use the result with the `AVG` function: diff --git a/docs/reference/esql/time-spans.asciidoc b/docs/reference/esql/time-spans.asciidoc new file mode 100644 index 0000000000000..d2aa0c4fa252e --- /dev/null +++ b/docs/reference/esql/time-spans.asciidoc @@ -0,0 +1,111 @@ +[[esql-time-spans]] +=== {esql} time spans + +++++ +Time spans +++++ + +Time spans represent intervals between two datetime values. There are currently two supported types of time spans: + +* `DATE_PERIOD` specifies intervals in years, quarters, months, weeks and days +* `TIME_DURATION` specifies intervals in hours, minutes, seconds and milliseconds + +A time span requires two elements: an integer value and a temporal unit. + +Time spans work with grouping functions such as <>, scalar functions such as <> and arithmetic operators such as <> and <>. Convert strings to time spans using <>, <>, or the cast operators `::DATE_PERIOD`, `::TIME_DURATION`.
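A quick illustrative sketch of the literal syntax, assuming the standard `employees` test data set (the file's own examples follow):

[source,esql]
----
FROM employees
// a timespan literal pairs an integer with a temporal unit, e.g. 1 week
| EVAL one_week_later = hire_date + 1 week, one_day_earlier = hire_date - 1 day
| KEEP hire_date, one_week_later, one_day_earlier
----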
+ +[discrete] +[[esql-time-spans-examples]] +==== Examples of using time spans in {esql} + + +With `BUCKET`: +[source.merge.styled,esql] +---- +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan-result] +|=== + + +With `DATE_TRUNC`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] +|=== + + +With `+` and/or `-`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + + +When a time span is provided as a named parameter in string format, `TO_DATEPERIOD`, `::DATE_PERIOD`, `TO_TIMEDURATION` or `::TIME_DURATION` can be used to convert to its corresponding time span value for arithmetic operations like `+` and/or `-`. +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | EVAL x = hire_date + ?timespan::DATE_PERIOD, y = hire_date - TO_DATEPERIOD(?timespan) + """, + "params": [{"timespan" : "1 day"}] +} +---- + +When a time span is provided as a named parameter in string format, it can be automatically converted to its corresponding time span value in grouping functions and scalar functions, like `BUCKET` and `DATE_TRUNC`. +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" + | STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, ?timespan) + | SORT week + """, + "params": [{"timespan" : "1 week"}] +} +---- + +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | KEEP first_name, last_name, hire_date + | EVAL year_hired = DATE_TRUNC(?timespan, hire_date) + """, + "params": [{"timespan" : "1 year"}] +} +---- + +[discrete] +[[esql-time-spans-table]] +==== Supported temporal units +[%header.monospaced.styled,format=dsv,separator=|] +|=== +Temporal Units|Valid Abbreviations +year|y, yr, years +quarter|q, quarters +month|mo, months +week|w, weeks +day|d, days +hour|h, hours +minute|min, minutes +second|s, sec, seconds +millisecond|ms, milliseconds +|=== diff --git a/docs/reference/how-to.asciidoc b/docs/reference/how-to.asciidoc index ec514eb05be29..eeac2fe5c2f50 100644 --- a/docs/reference/how-to.asciidoc +++ b/docs/reference/how-to.asciidoc @@ -1,23 +1,21 @@ [[how-to]] -= How to += Optimizations -[partintro] --- -Elasticsearch ships with defaults which are intended to give a good out of -the box experience. Full text search, highlighting, aggregations, and indexing -should all just work without the user having to change anything. +Elasticsearch's default settings provide a good out-of-box experience for basic operations like full text search, highlighting, aggregations, and indexing. -Once you better understand how you want to use Elasticsearch, however, -there are a number of optimizations you can make to improve performance -for your use case. +However, there are a number of optimizations you can make to improve performance for your use case. -This section provides guidance about which changes should and shouldn't be -made. --- +This section provides recommendations for various use cases. 
-include::how-to/general.asciidoc[] +* <> +* <> +* <> +* <> +* <> +* <> +* <> -include::how-to/recipes.asciidoc[] +include::how-to/general.asciidoc[] include::how-to/indexing-speed.asciidoc[] diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 12de469c68449..d4cdb85e4d624 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -112,7 +112,7 @@ different nodes so there's redundancy for any node failures. You can also use insurance. [discrete] -==== Local vs.remote storage +==== Local vs. remote storage include::./remote-storage.asciidoc[] diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc index b46f624aef51d..de23404be6164 100644 --- a/docs/reference/how-to/recipes.asciidoc +++ b/docs/reference/how-to/recipes.asciidoc @@ -1,7 +1,7 @@ [[recipes]] -== Recipes +== Search relevance optimizations -This section includes a few recipes to help with common problems: +This section includes a few recipes to help with common search relevance issues: * <> * <> diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 5c5a8977d34d4..a578826e31fac 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -88,8 +88,9 @@ pages independently of the query. There are two main queries that allow combining static score contributions with textual relevance, eg. as computed with BM25: - - <> - - <> + +* <> +* <> For instance imagine that you have a `pagerank` field that you wish to combine with the BM25 score so that the final score is equal to diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index e8df44653e9c5..911dc8b9cce40 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -8,8 +8,9 @@ When this happens, {ilm-init} moves the index to an `ERROR` step. If {ilm-init} cannot resolve the error automatically, execution is halted until you resolve the underlying issues with the policy, index, or cluster. -See this https://www.youtube.com/watch?v=VCIqkji3IwY[{ilm-init} health video] -for example troubleshooting walkthrough. +See https://www.youtube.com/watch?v=VCIqkji3IwY[this video] +for a walkthrough of troubleshooting current {ilm-init} health issues, and https://www.youtube.com/watch?v=onrnnwjYWSQ[this video] +for a walkthrough of troubleshooting historical {ilm-init} issues. For example, you might have a `shrink-index` policy that shrinks an index to four shards once it is at least five days old: diff --git a/docs/reference/migration/migrate_8_18.asciidoc b/docs/reference/migration/migrate_8_18.asciidoc new file mode 100644 index 0000000000000..c989ff9f85b6d --- /dev/null +++ b/docs/reference/migration/migrate_8_18.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.18]] +== Migrating to 8.18 +++++ +8.18 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.18. + +See also <> and <>. + +coming::[8.18.0] + + +[discrete] +[[breaking-changes-8.18]] +=== Breaking changes + +There are no breaking changes in {es} 8.18. 
+ diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 6569647fd993e..5048220966bba 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[migrating-9.0]] == Migrating to 9.0 ++++ @@ -23,204 +20,229 @@ The following changes in {es} 9.0 might affect your applications and prevent them from operating normally. Before upgrading to 9.0, review these changes and take the described steps to mitigate the impact. -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[set_lenient_to_true_by_default_when_using_updateable_synonyms]] -// .Set lenient to true by default when using updateable synonyms -// [%collapsible] -// ==== -// *Details* + -// When a `synonym` or `synonym_graph` token filter is configured with `updateable: true`, the default `lenient` -// value will now be `true`. -// -// *Impact* + -// `synonym` or `synonym_graph` token filters configured with `updateable: true` will ignore invalid synonyms by -// default. This prevents shard initialization errors on invalid synonyms. -// ==== -// -// [discrete] -// [[breaking_90_mapping_changes]] -// ==== Mapping changes -// -// [[jdk_locale_database_change]] -// .JDK locale database change -// [%collapsible] -// ==== -// *Details* + -// {es} 8.16 changes the version of the JDK that is included from version 22 to version 23. This changes the locale database that is used by Elasticsearch from the COMPAT database to the CLDR database. This change can cause significant differences to the textual date formats accepted by Elasticsearch, and to calculated week-dates. -// -// If you run {es} 8.16 on JDK version 22 or below, it will use the COMPAT locale database to match the behavior of 8.15. However, starting with {es} 9.0, {es} will use the CLDR database regardless of JDK version it is run on. -// -// *Impact* + -// This affects you if you use custom date formats using textual or week-date field specifiers. If you use date fields or calculated week-dates that change between the COMPAT and CLDR databases, then this change will cause Elasticsearch to reject previously valid date fields as invalid data. You might need to modify your ingest or output integration code to account for the differences between these two JDK versions. -// -// Starting in version 8.15.2, Elasticsearch will log deprecation warnings if you are using date format specifiers that might change on upgrading to JDK 23. These warnings are visible in Kibana. -// -// For detailed guidance, refer to <> and the https://ela.st/jdk-23-locales[Elastic blog]. -// ==== -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[snowball_stemmers_have_been_upgraded]] -// .Snowball stemmers have been upgraded -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users using Snowball stemmers that are experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The upgrade should generally provide improved stemming results. 
Small changes in token analysis can lead to mismatches with previously index data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed. -// ==== -// -// [[german2_snowball_stemmer_an_alias_for_german_stemmer]] -// .The "german2" snowball stemmer is now an alias for the "german" stemmer -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may results in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü" etc...) -// -// *Impact* + -// Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible. -// ==== -// -// [[persian_analyzer_has_stemmer_by_default]] -// .The 'persian' analyzer has stemmer by default -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. -// -// *Impact* + -// Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. -// ==== -// -// [[korean_dictionary_for_nori_has_been_updated]] -// .The Korean dictionary for Nori has been updated -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. -// ==== -// -// [discrete] -// [[breaking_90_cluster_and_node_setting_changes]] -// ==== Cluster and node setting changes -// -// [[remove_unsupported_legacy_value_for_discovery_type]] -// .Remove unsupported legacy value for `discovery.type` -// [%collapsible] -// ==== -// *Details* + -// Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. -// -// *Impact* + -// Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. -// ==== -// -// [discrete] -// [[breaking_90_es_ql_changes]] -// ==== ES|QL changes -// -// [[esql_entirely_remove_meta_functions]] -// .ESQL: Entirely remove META FUNCTIONS -// [%collapsible] -// ==== -// *Details* + -// Removes an undocumented syntax from ESQL: META FUNCTION. This was never -// reliable or really useful. Consult the documentation instead. 
-// -// *Impact* + -// Removes an undocumented syntax from ESQL: META FUNCTION -// ==== -// -// [discrete] -// [[breaking_90_rest_api_changes]] -// ==== REST API changes -// -// [[remove_cluster_state_from_cluster_reroute_response]] -// .Remove cluster state from `/_cluster/reroute` response -// [%collapsible] -// ==== -// *Details* + -// The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version. -// -// *Impact* + -// Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API. -// ==== -// -// [[remove_deprecated_local_attribute_from_alias_apis]] -// .Remove deprecated local attribute from alias APIs -// [%collapsible] -// ==== -// *Details* + -// The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. -// -// *Impact* + -// Cease usage of the `?local` query parameter when calling the listed APIs. -// ==== -// -// [[reworking_rrf_retriever_to_be_evaluated_during_rewrite_phase]] -// .Reworking RRF retriever to be evaluated during rewrite phase -// [%collapsible] -// ==== -// *Details* + -// In this release (8.16), we have introduced major changes to the retrievers framework -// and how they can be evaluated, focusing mainly on compound retrievers -// like `rrf` and `text_similarity_reranker`, which allowed us to support full -// composability (i.e. any retriever can be nested under any compound retriever), -// as well as supporting additional search features like collapsing, explaining, -// aggregations, and highlighting. -// -// To ensure consistency, and given that this rework is not available until 8.16, -// `rrf` and `text_similarity_reranker` retriever queries would now -// throw an exception in a mixed cluster scenario, where there are nodes -// both in current or later (i.e. >= 8.16) and previous ( <= 8.15) versions. -// -// As part of the rework, we have also removed the `_rank` property from -// the responses of an `rrf` retriever. -// -// *Impact* + -// - Users will not be able to use the `rrf` and `text_similarity_reranker` retrievers in a mixed cluster scenario -// with previous releases (i.e. prior to 8.16), and the request will throw an `IllegalArgumentException`. -// - `_rank` has now been removed from the output of the `rrf` retrievers so trying to directly parse the field -// will throw an exception -// ==== -// -// [[update_data_stream_lifecycle_telemetry_to_track_global_retention]] -// .Update data stream lifecycle telemetry to track global retention -// [%collapsible] -// ==== -// *Details* + -// In this release we introduced global retention settings that fulfil the following criteria: -// -// - a data stream managed by the data stream lifecycle, -// - a data stream that is not an internal data stream. -// -// As a result, we defined different types of retention: -// -// - **data retention**: the retention configured on data stream level by the data stream user or owner -// - **default global retention:** the retention configured by an admin on a cluster level and applied to any -// data stream that doesn't have data retention and fulfils the criteria. 
-// - **max global retention:** the retention configured by an admin to guard against having long retention periods.
-// Any data stream that fulfills the criteria will adhere to the data retention unless it exceeds the max retention,
-// in which case the max global retention applies.
-// - **effective retention:** the retention that applies on the data stream that fulfill the criteria at a given moment
-// in time. It takes into consideration all the retention above and resolves it to the retention that will take effect.
-//
-// Considering the above changes, having a field named `retention` in the usage API was confusing. For this reason, we
-// renamed it to `data_retention` and added telemetry about the other configurations too.
-//
-// *Impact* +
-// Users that use the field `data_lifecycle.retention` should use the `data_lifecycle.data_retention`
-// ====
+
+
+There are no notable breaking changes in {es} 9.0.
+However, there are some less critical breaking changes.
+
+[discrete]
+[[breaking_90_analysis_changes]]
+==== Analysis changes
+
+[[snowball_stemmers_have_been_upgraded]]
+.Snowball stemmers have been upgraded
+[%collapsible]
+====
+*Details* +
+Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users of Snowball stemmers who are experiencing changes in search behaviour on existing data are advised to reindex.
+
+*Impact* +
+The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously indexed data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed.
+====
+
+[[german2_snowball_stemmer_an_alias_for_german_stemmer]]
+.The "german2" snowball stemmer is now an alias for the "german" stemmer
+[%collapsible]
+====
+*Details* +
+Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may result in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü", etc.).
+
+*Impact* +
+Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible.
+====
+
+[[persian_analyzer_has_stemmer_by_default]]
+.The 'persian' analyzer has a stemmer by default
+[%collapsible]
+====
+*Details* +
+Lucene 10 has added a final stemming step to its PersianAnalyzer, which Elasticsearch exposes as the 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data.
+
+*Impact* +
+Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour, they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer.
+====
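For readers affected by the 'persian' analyzer change above, a sketch of an index-level analyzer that approximates the legacy, non-stemming behaviour. The index name and analyzer name are illustrative, and the component chain follows the analyzer composition documented for 8.x releases; verify it against the page linked in the entry.

[source,console]
----
PUT /persian_no_stem_example
{
  "settings": {
    "analysis": {
      "char_filter": {
        "zero_width_spaces": {
          "type": "mapping",
          "mappings": [ "\\u200C=>\\u0020" ] <1>
        }
      },
      "filter": {
        "persian_stop": {
          "type": "stop",
          "stopwords": "_persian_"
        }
      },
      "analyzer": {
        "persian_without_stemmer": {
          "tokenizer": "standard",
          "char_filter": [ "zero_width_spaces" ],
          "filter": [
            "lowercase",
            "decimal_digit",
            "arabic_normalization",
            "persian_normalization",
            "persian_stop" <2>
          ]
        }
      }
    }
  }
}
----
<1> Replaces zero-width non-joiners with an ordinary space.
<2> The chain deliberately ends without a Persian stemming filter, unlike the 9.0 built-in analyzer.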
+
+[[korean_dictionary_for_nori_has_been_updated]]
+.The Korean dictionary for Nori has been updated
+[%collapsible]
+====
+*Details* +
+Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex.
+
+*Impact* +
+The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed, though.
+====
+
+[discrete]
+[[breaking_90_cluster_and_node_setting_changes]]
+==== Cluster and node setting changes
+
+[[minimum_shard_balancer_threshold_1_0]]
+.Minimum shard balancer threshold is now 1.0
+[%collapsible]
+====
+*Details* +
+Earlier versions of {es} accepted any non-negative value for `cluster.routing.allocation.balance.threshold`, but values smaller than `1.0` do not make sense and have been ignored since version 8.6.1. From 9.0.0, these nonsensical values are forbidden.
+
+*Impact* +
+Do not set `cluster.routing.allocation.balance.threshold` to a value less than `1.0`.
+====
+
+[[remove_cluster_routing_allocation_disk_watermark_enable_for_single_data_node_setting]]
+.Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting
+[%collapsible]
+====
+*Details* +
+Prior to 7.8, whenever a cluster had only a single data node, the watermarks would not be respected. In order to change this in 7.8+ in a backwards-compatible way, we introduced the `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` node setting. The setting was deprecated in 7.14 and was made to accept only `true` in 8.0.
+
+*Impact* +
+No known end-user impact.
+====
+
+[[remove_deprecated_xpack_searchable_snapshot_allocate_on_rolling_restart_setting]]
+.Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting
+[%collapsible]
+====
+*Details* +
+The `xpack.searchable.snapshot.allocate_on_rolling_restart` setting was created as an escape hatch, in case relying on the `cluster.routing.allocation.enable=primaries` setting for allocating searchable snapshots during rolling restarts had some unintended side effects. It has been deprecated since 8.2.0.
+
+*Impact* +
+Remove `xpack.searchable.snapshot.allocate_on_rolling_restart` from your settings if present.
+====
+
+[[remove_unsupported_legacy_value_for_discovery_type]]
+.Remove unsupported legacy value for `discovery.type`
+[%collapsible]
+====
+*Details* +
+Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`.
+
+*Impact* +
+Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file.
+====
+
+[discrete]
+[[breaking_90_ingest_changes]]
+==== Ingest changes
+
+[[remove_ecs_option_on_user_agent_processor]]
+.Remove `ecs` option on `user_agent` processor
+[%collapsible]
+====
+*Details* +
+The `user_agent` ingest processor no longer accepts the `ecs` option. (It was previously deprecated and ignored.)
+
+*Impact* +
+Users should stop using the `ecs` option when creating instances of the `user_agent` ingest processor. The option will be removed from existing processors stored in the cluster state on upgrade.
+====
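Per the `user_agent` entry above, a valid 9.0-style processor definition simply omits the removed `ecs` option. The pipeline name and source field here are illustrative.

[source,console]
----
PUT _ingest/pipeline/user_agent_example
{
  "description": "Parses browser details from a raw user agent string",
  "processors": [
    {
      "user_agent": {
        "field": "agent" <1>
      }
    }
  ]
}
----
<1> The field containing the raw user agent string; note that no `ecs` option appears anywhere in the definition.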
+
+[[remove_ignored_fallback_option_on_geoip_processor]]
+.Remove ignored fallback option on GeoIP processor
+[%collapsible]
+====
+*Details* +
+The `fallback_to_default_databases` option on the `geoip` ingest processor has been removed. (It was deprecated and ignored since 8.0.0.)
+
+*Impact* +
+Users should remove the no-op `fallback_to_default_databases` option from any `geoip` ingest processors.
+====
+
+[discrete]
+[[breaking_90_mapping_changes]]
+==== Mapping changes
+
+[[remove_support_for_type_fields_copy_to_boost_in_metadata_field_definition]]
+.Remove support for `type`, `fields`, `copy_to` and `boost` in metadata field definition
+[%collapsible]
+====
+*Details* +
+The `type`, `fields`, `copy_to` and `boost` parameters are no longer supported in metadata field definitions.
+
+*Impact* +
+Users providing `type`, `fields`, `copy_to` or `boost` as part of a metadata field definition should remove them from their mappings.
+====
+
+[discrete]
+[[breaking_90_rest_api_changes]]
+==== REST API changes
+
+[[apply_more_strict_parsing_of_actions_in_bulk_api]]
+.Apply more strict parsing of actions in bulk API
+[%collapsible]
+====
+*Details* +
+Previously, the following classes of malformed input were deprecated but not rejected in the action lines of a bulk request: missing closing brace; additional keys after the action (which were ignored); additional data after the closing brace (which was ignored). They will now be considered errors and rejected.
+
+*Impact* +
+Users must provide well-formed input when using the bulk API. (They can request REST API compatibility with v8 to get the previous behaviour back as an interim measure.)
+====
+
+[[error_json_structure_has_changed_when_detailed_errors_are_disabled]]
+.Error JSON structure has changed when detailed errors are disabled
+[%collapsible]
+====
+*Details* +
+This change modifies the JSON format of error messages returned to REST clients
+when detailed messages are turned off.
+Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set,
+just consisted of a single `"error"` text field with some basic information.
+Setting `http.detailed_errors.enabled: true` (the default) changed this field
+to an object with more detailed information.
+With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always
+be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed
+errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+
+*Impact* +
+If you have set `http.detailed_errors.enabled: false` (the default is `true`),
+the structure of the JSON when any exceptions occur now matches the structure when
+detailed errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+====
+
+[[remove_cluster_state_from_cluster_reroute_response]]
+.Remove cluster state from `/_cluster/reroute` response
+[%collapsible]
+====
+*Details* +
+The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version.
+
+*Impact* +
+Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API.
+====
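To make the stricter bulk parsing described above concrete: each action line must be a single, complete JSON object with nothing after its closing brace. The index name and documents here are illustrative.

[source,console]
----
POST /_bulk
{ "index": { "_index": "my-index", "_id": "1" } } <1>
{ "message": "a well-formed action line followed by its document" }
{ "delete": { "_index": "my-index", "_id": "2" } } <2>
----
<1> Exactly one action object per line; a missing closing brace, extra keys after the action, or trailing data after the brace is now rejected rather than ignored.
<2> `delete` actions are not followed by a document line.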
+
+[[remove_deprecated_local_attribute_from_alias_apis]]
+.Remove deprecated local attribute from alias APIs
+[%collapsible]
+====
+*Details* +
+The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12.
+
+*Impact* +
+Cease usage of the `?local` query parameter when calling the listed APIs.
+====
+
+[[remove_legacy_params_from_range_query]]
+.Remove legacy params from range query
+[%collapsible]
+====
+*Details* +
+The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported.
+
+*Impact* +
+Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead.
+====
+
+[[remove_support_for_deprecated_force_source_highlighting_parameter]]
+.Remove support for deprecated `force_source` highlighting parameter
+[%collapsible]
+====
+*Details* +
+The deprecated highlighting `force_source` parameter is no longer supported.
+
+*Impact* +
+Users should remove usages of the `force_source` parameter from their search requests.
+====
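The range query change above amounts to switching from the legacy bound parameters to their long-supported equivalents. A sketch, with a hypothetical `timestamp` field:

[source,console]
----
GET /my-index/_search
{
  "query": {
    "range": {
      "timestamp": {
        "gte": "2024-01-01", <1>
        "lt": "2025-01-01"   <2>
      }
    }
  }
}
----
<1> Inclusive lower bound, replacing `from` combined with `include_lower: true`.
<2> Exclusive upper bound, replacing `to` combined with `include_upper: false`.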
-// ==== -// -// [discrete] -// [[deprecations_90_rest_api]] -// ==== REST API deprecations -// -// [[adding_deprecation_warnings_for_rrf_using_rank_sub_searches]] -// .Adding deprecation warnings for rrf using rank and `sub_searches` -// [%collapsible] -// ==== -// *Details* + -// Search API parameter `sub_searches` will no longer be a supported and will be removed in future releases. Similarly, `rrf` can only be used through the specified `retriever` and no longer though the `rank` parameter -// -// *Impact* + -// Requests specifying rrf through `rank` and/or `sub_searches` elements will be disallowed in a future version. Users should instead utilize the new `retriever` parameter. -// ==== -// -// [[deprecate_legacy_params_from_range_query]] -// .Deprecate legacy params from range query -// [%collapsible] -// ==== -// *Details* + -// Range query will not longer accept `to`, `from`, `include_lower`, and `include_upper` parameters. -// -// *Impact* + -// Instead use `gt`, `gte`, `lt` and `lte` parameters. -// ==== -// -// [[inference_api_deprecate_elser_service]] -// .[Inference API] Deprecate elser service -// [%collapsible] -// ==== -// *Details* + -// The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead. -// -// *Impact* + -// In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API. -// ==== - -// BELOW WAS MANUALLY ADDED TO FIX THE BUILD -include::migrate_9_0/transient-settings-migration-guide.asciidoc[] -//include::migrate_9_0/rest-api-changes.asciidoc[] //see ES-9932 + +[discrete] +[[deprecations_90_mapping]] +==== Mapping deprecations + +[[deprecate_source_mode_in_mappings]] +.Deprecate `_source.mode` in mappings +[%collapsible] +==== +*Details* + +Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. + +*Impact* + +Use `index.mapping.source.mode` index setting instead +==== + +[discrete] +[[deprecations_90_rest_api]] +==== REST API deprecations + +[[document_type_deprecated_on_simulate_pipeline_api]] +.Document `_type` deprecated on simulate pipeline API +[%collapsible] +==== +*Details* + +Passing a document with a `_type` property is deprecated in the `/_ingest/pipeline/{id}/_simulate` and `/_ingest/pipeline/_simulate` APIs. + +*Impact* + +Users should already have stopped using mapping types, which were deprecated in {es} 7. This deprecation warning will fire if they specify mapping types on documents pass to the simulate pipeline API. +==== + +[[inference_api_deprecate_elser_service]] +.[Inference API] Deprecate elser service +[%collapsible] +==== +*Details* + +The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead. + +*Impact* + +In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API. +==== + diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc index 984fb0d5bf1c1..17d67e00e2ebb 100644 --- a/docs/reference/modules/http.asciidoc +++ b/docs/reference/modules/http.asciidoc @@ -145,11 +145,9 @@ NOTE: This header is only returned when the setting is set to `true`. 
diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc
index 984fb0d5bf1c1..17d67e00e2ebb 100644
--- a/docs/reference/modules/http.asciidoc
+++ b/docs/reference/modules/http.asciidoc
@@ -145,11 +145,9 @@ NOTE: This header is only returned when the setting is set to `true`.
 
 `http.detailed_errors.enabled`::
 (<>, boolean)
-Configures whether detailed error reporting in HTTP responses is enabled.
-Defaults to `true`, which means that HTTP requests that include the
-<> will return a
-detailed error message including a stack trace if they encounter an exception.
-If set to `false`, requests with the `?error_trace` parameter are rejected.
+Configures whether detailed error reporting in HTTP responses is enabled. Defaults to `true`.
+When this option is set to `false`, only basic information is returned if an error occurs in the request,
+and requests with <> set are rejected.
 
 `http.pipelining.max_events`::
 (<>, integer)
diff --git a/docs/reference/release-notes/8.18.0.asciidoc b/docs/reference/release-notes/8.18.0.asciidoc
new file mode 100644
index 0000000000000..332edfbc23eb7
--- /dev/null
+++ b/docs/reference/release-notes/8.18.0.asciidoc
@@ -0,0 +1,8 @@
+[[release-notes-8.18.0]]
+== {es} version 8.18.0
+
+coming[8.18.0]
+
+Also see <>.
+
+
diff --git a/docs/reference/release-notes/9.0.0.asciidoc b/docs/reference/release-notes/9.0.0.asciidoc
index af26fd57385e3..93e5a30cb82f7 100644
--- a/docs/reference/release-notes/9.0.0.asciidoc
+++ b/docs/reference/release-notes/9.0.0.asciidoc
@@ -1,6 +1,3 @@
-// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.
-// The content generated here are is not correct and most has been manually commented out until it can be fixed.
-// See ES-9931 for more details.
 [[release-notes-9.0.0]]
 == {es} version 9.0.0
 
@@ -12,546 +9,289 @@ Also see <>.
 [float]
 === Breaking changes
 
-// Allocation::
-// * Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978])
-//
-// Analysis::
-// * Set lenient to true by default when using updateable synonyms {es-pull}110901[#110901]
-// * Snowball stemmers have been upgraded {es-pull}114146[#114146]
-// * The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614]
-// * The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050])
-// * The Korean dictionary for Nori has been updated {es-pull}114124[#114124]
-//
-// Cluster Coordination::
-// * Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903]
-//
-// Data streams::
-// * Update data stream lifecycle telemetry to track global retention {es-pull}112451[#112451]
-//
-// ES|QL::
-// * ESQL: Entirely remove META FUNCTIONS {es-pull}113967[#113967]
-//
-// Indices APIs::
-// * Remove deprecated local attribute from alias APIs {es-pull}115393[#115393]
-//
-// Mapping::
-// * JDK locale database change {es-pull}113975[#113975]
-//
-// Search::
-// * Adding breaking change entry for retrievers {es-pull}115399[#115399]
+Allocation::
+* Increase minimum threshold in shard balancer {es-pull}115831[#115831]
+* Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting {es-pull}114207[#114207]
+* Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978])
+
+Analysis::
+* Snowball stemmers have been upgraded {es-pull}114146[#114146]
+* The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614]
+* The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050])
+* The Korean dictionary for Nori has been updated {es-pull}114124[#114124]
+
+Cluster Coordination::
+* Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903]
+
+Highlighting:: +* Remove support for deprecated `force_source` highlighting parameter {es-pull}116943[#116943] + +Indices APIs:: +* Apply more strict parsing of actions in bulk API {es-pull}115923[#115923] +* Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] + +Infra/REST API:: +* Output a consistent format when generating error json {es-pull}90529[#90529] (issue: {es-issue}89387[#89387]) + +Ingest Node:: +* Remove `ecs` option on `user_agent` processor {es-pull}116077[#116077] +* Remove ignored fallback option on GeoIP processor {es-pull}116112[#116112] + +Mapping:: +* Remove support for type, fields, `copy_to` and boost in metadata field definition {es-pull}116944[#116944] + +Search:: +* Remove legacy params from range query {es-pull}116970[#116970] + +Snapshot/Restore:: +* Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting {es-pull}114202[#114202] [[bug-9.0.0]] [float] === Bug fixes -// -// Aggregations:: -// * Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` {es-pull}115181[#115181] -// * Force using the last centroid during merging {es-pull}111644[#111644] (issue: {es-issue}111065[#111065]) -// -// Authentication:: -// * Check for disabling own user in Put User API {es-pull}112262[#112262] (issue: {es-issue}90205[#90205]) -// * Expose cluster-state role mappings in APIs {es-pull}114951[#114951] -// -// Authorization:: -// * Fix DLS & FLS sometimes being enforced when it is disabled {es-pull}111915[#111915] (issue: {es-issue}94709[#94709]) -// * Fix DLS using runtime fields and synthetic source {es-pull}112341[#112341] -// -// CRUD:: -// * Don't fail retention lease sync actions due to capacity constraints {es-pull}109414[#109414] (issue: {es-issue}105926[#105926]) -// * Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] -// * Standardize error code when bulk body is invalid {es-pull}114869[#114869] -// -// Cluster Coordination:: -// * Ensure clean thread context in `MasterService` {es-pull}114512[#114512] -// -// Data streams:: -// * Adding support for data streams with a match-all template {es-pull}111311[#111311] (issue: {es-issue}111204[#111204]) -// * Exclude internal data streams from global retention {es-pull}112100[#112100] -// * Fix verbose get data stream API not requiring extra privileges {es-pull}112973[#112973] -// * OTel mappings: avoid metrics to be rejected when attributes are malformed {es-pull}114856[#114856] -// * [otel-data] Add more kubernetes aliases {es-pull}115429[#115429] -// * logs-apm.error-*: define log.level field as keyword {es-pull}112440[#112440] -// -// Distributed:: -// * Handle `InternalSendException` inline for non-forking handlers {es-pull}114375[#114375] -// -// EQL:: -// * Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) -// * Fix validation of TEXT fields with case insensitive comparison {es-pull}111238[#111238] (issue: {es-issue}111235[#111235]) -// -// ES|QL:: -// * ESQL: Add Values aggregation tests, fix `ConstantBytesRefBlock` memory handling {es-pull}111367[#111367] -// * ESQL: Align year diffing to the rest of the units in DATE_DIFF: chronological {es-pull}113103[#113103] (issue: {es-issue}112482[#112482]) -// * ESQL: Disable pushdown of WHERE past STATS {es-pull}115308[#115308] (issue: {es-issue}115281[#115281]) -// * ESQL: Fix CASE when conditions are multivalued {es-pull}112401[#112401] (issue: {es-issue}112359[#112359]) -// * 
ESQL: Fix Double operations returning infinite {es-pull}111064[#111064] (issue: {es-issue}111026[#111026]) -// * ESQL: Fix `REVERSE` with backspace character {es-pull}115245[#115245] (issues: {es-issue}114372[#114372], {es-issue}115227[#115227], {es-issue}115228[#115228]) -// * ESQL: Fix a bug in `MV_PERCENTILE` {es-pull}112218[#112218] (issues: {es-issue}112193[#112193], {es-issue}112180[#112180], {es-issue}112187[#112187], {es-issue}112188[#112188]) -// * ESQL: Fix filtered grouping on ords {es-pull}115312[#115312] (issue: {es-issue}114897[#114897]) -// * ESQL: Fix grammar changes around per agg filtering {es-pull}114848[#114848] -// * ESQL: Fix serialization during `can_match` {es-pull}111779[#111779] (issues: {es-issue}111701[#111701], {es-issue}111726[#111726]) -// * ESQL: Fix synthetic attribute pruning {es-pull}111413[#111413] (issue: {es-issue}105821[#105821]) -// * ESQL: don't lose the original casting error message {es-pull}111968[#111968] (issue: {es-issue}111967[#111967]) -// * ESQL: fix for missing indices error message {es-pull}111797[#111797] (issue: {es-issue}111712[#111712]) -// * ES|QL: Fix stats by constant expression {es-pull}114899[#114899] -// * ES|QL: Restrict sorting for `_source` and counter field types {es-pull}114638[#114638] (issues: {es-issue}114423[#114423], {es-issue}111976[#111976]) -// * ES|QL: better validation for GROK patterns {es-pull}110574[#110574] (issue: {es-issue}110533[#110533]) -// * ES|QL: better validation for RLIKE patterns {es-pull}112489[#112489] (issue: {es-issue}112485[#112485]) -// * ES|QL: better validation of GROK patterns {es-pull}112200[#112200] (issue: {es-issue}112111[#112111]) -// * Fix ST_CENTROID_AGG when no records are aggregated {es-pull}114888[#114888] (issue: {es-issue}106025[#106025]) -// * Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) -// * Spatial search functions support multi-valued fields in compute engine {es-pull}112063[#112063] (issues: {es-issue}112102[#112102], {es-issue}112505[#112505], {es-issue}110830[#110830]) -// * [ES|QL] Check expression resolved before checking its data type in `ImplicitCasting` {es-pull}113314[#113314] (issue: {es-issue}113242[#113242]) -// * [ES|QL] Simplify patterns for subfields {es-pull}111118[#111118] -// * [ES|QL] Simplify syntax of named parameter for identifier and pattern {es-pull}115061[#115061] -// * [ES|QL] Skip validating remote cluster index names in parser {es-pull}114271[#114271] -// * [ES|QL] Use `RangeQuery` and String in `BinaryComparison` on datetime fields {es-pull}110669[#110669] (issue: {es-issue}107900[#107900]) -// * [ES|QL] add tests for stats by constant {es-pull}110593[#110593] (issue: {es-issue}105383[#105383]) -// * [ES|QL] make named parameter for identifier and pattern snapshot {es-pull}114784[#114784] -// * [ES|QL] validate `mv_sort` order {es-pull}110021[#110021] (issue: {es-issue}109910[#109910]) -// -// Geo:: -// * Fix cases of collections with one point {es-pull}111193[#111193] (issue: {es-issue}110982[#110982]) -// -// Health:: -// * Set `replica_unassigned_buffer_time` in constructor {es-pull}112612[#112612] -// -// ILM+SLM:: -// * Make `SnapshotLifecycleStats` immutable so `SnapshotLifecycleMetadata.EMPTY` isn't changed as side-effect {es-pull}111215[#111215] -// -// Indices APIs:: -// * Revert "Add `ResolvedExpression` wrapper" {es-pull}115317[#115317] -// -// Infra/Core:: -// * Fix max file size check to use `getMaxFileSize` {es-pull}113723[#113723] (issue: {es-issue}113705[#113705]) -// * Guard blob 
store local directory creation with `doPrivileged` {es-pull}115459[#115459] -// * Handle `BigInteger` in xcontent copy {es-pull}111937[#111937] (issue: {es-issue}111812[#111812]) -// * Report JVM stats for all memory pools (97046) {es-pull}115117[#115117] (issue: {es-issue}97046[#97046]) -// * `ByteArrayStreamInput:` Return -1 when there are no more bytes to read {es-pull}112214[#112214] -// -// Infra/Logging:: -// * Only emit product origin in deprecation log if present {es-pull}111683[#111683] (issue: {es-issue}81757[#81757]) -// -// Infra/Metrics:: -// * Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] -// -// Infra/REST API:: -// * Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. {es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) -// -// Infra/Settings:: -// * GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml {es-pull}110816[#110816] (issue: {es-issue}110815[#110815]) -// -// Ingest Node:: -// * Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) -// * Fix IPinfo geolocation schema {es-pull}115147[#115147] -// * Fix `getDatabaseType` for unusual MMDBs {es-pull}112888[#112888] -// * Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] -// -// License:: -// * Fix Start Trial API output acknowledgement header for features {es-pull}111740[#111740] (issue: {es-issue}111739[#111739]) -// * Fix `TokenService` always appearing used in Feature Usage {es-pull}112263[#112263] (issue: {es-issue}61956[#61956]) -// -// Logs:: -// * Do not expand dots when storing objects in ignored source {es-pull}113910[#113910] -// * Fix `ignore_above` handling in synthetic source when index level setting is used {es-pull}113570[#113570] (issue: {es-issue}113538[#113538]) -// * Fix synthetic source for flattened field when used with `ignore_above` {es-pull}113499[#113499] (issue: {es-issue}112044[#112044]) -// -// Machine Learning:: -// * Avoid `ModelAssignment` deadlock {es-pull}109684[#109684] -// * Fix NPE in Get Deployment Stats {es-pull}115404[#115404] -// * Fix bug in ML serverless autoscaling which prevented trained model updates from triggering a scale up {es-pull}110734[#110734] -// * Ignore unrecognized openai sse fields {es-pull}114715[#114715] -// * Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) -// * Prevent NPE if model assignment is removed while waiting to start {es-pull}115430[#115430] -// * Send mid-stream errors to users {es-pull}114549[#114549] -// * Temporarily return both `modelId` and `inferenceId` for GET /_inference until we migrate clients to only `inferenceId` {es-pull}111490[#111490] -// * Warn for model load failures if they have a status code <500 {es-pull}113280[#113280] -// * [Inference API] Remove unused Cohere rerank service settings fields in a BWC way {es-pull}110427[#110427] -// * [ML] Create Inference API will no longer return model_id and now only return inference_id {es-pull}112508[#112508] -// -// Mapping:: -// * Fix `MapperBuilderContext#isDataStream` when used in dynamic mappers {es-pull}110554[#110554] -// * Fix synthetic source field names for multi-fields {es-pull}112850[#112850] -// * Retrieve the source for objects and arrays in a separate parsing 
phase {es-pull}113027[#113027] (issue: {es-issue}112374[#112374]) -// * Two empty mappings now are created equally {es-pull}107936[#107936] (issue: {es-issue}107031[#107031]) -// -// Ranking:: -// * Fix MLTQuery handling of custom term frequencies {es-pull}110846[#110846] -// * Fix RRF validation for `rank_constant` < 1 {es-pull}112058[#112058] -// * Fix score count validation in reranker response {es-pull}111212[#111212] (issue: {es-issue}111202[#111202]) -// -// Search:: -// * Allow for querries on `_tier` to skip shards in the `can_match` phase {es-pull}114990[#114990] (issue: {es-issue}114910[#114910]) -// * Allow out of range term queries for numeric types {es-pull}112916[#112916] -// * Do not exclude empty arrays or empty objects in source filtering {es-pull}112250[#112250] (issue: {es-issue}109668[#109668]) -// * Fix synthetic source handling for `bit` type in `dense_vector` field {es-pull}114407[#114407] (issue: {es-issue}114402[#114402]) -// * Improve DateTime error handling and add some bad date tests {es-pull}112723[#112723] (issue: {es-issue}112190[#112190]) -// * Improve date expression/remote handling in index names {es-pull}112405[#112405] (issue: {es-issue}112243[#112243]) -// * Make "too many clauses" throw IllegalArgumentException to avoid 500s {es-pull}112678[#112678] (issue: {es-issue}112177[#112177]) -// * Make empty string searches be consistent with case (in)sensitivity {es-pull}110833[#110833] -// * Prevent flattening of ordered and unordered interval sources {es-pull}114234[#114234] -// * Remove needless forking to GENERIC in `TransportMultiSearchAction` {es-pull}110796[#110796] -// * Search/Mapping: KnnVectorQueryBuilder support for allowUnmappedFields {es-pull}107047[#107047] (issue: {es-issue}106846[#106846]) -// * Span term query to convert to match no docs when unmapped field is targeted {es-pull}113251[#113251] -// * Speedup `CanMatchPreFilterSearchPhase` constructor {es-pull}110860[#110860] -// * Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) -// -// Security:: -// * Updated the transport CA name in Security Auto-Configuration. 
{es-pull}106520[#106520] (issue: {es-issue}106455[#106455]) -// -// Snapshot/Restore:: -// * Retry throttled snapshot deletions {es-pull}113237[#113237] -// -// TSDB:: -// * Implement `parseBytesRef` for `TimeSeriesRoutingHashFieldType` {es-pull}113373[#113373] (issue: {es-issue}112399[#112399]) -// -// Task Management:: -// * Improve handling of failure to create persistent task {es-pull}114386[#114386] -// -// Transform:: -// * Allow task canceling of validate API calls {es-pull}110951[#110951] -// * Include reason when no nodes are found {es-pull}112409[#112409] (issue: {es-issue}112404[#112404]) -// -// Vector Search:: -// * Fix dim validation for bit `element_type` {es-pull}114533[#114533] -// * Support semantic_text in object fields {es-pull}114601[#114601] (issue: {es-issue}114401[#114401]) -// -// Watcher:: -// * Truncating watcher history if it is too large {es-pull}111245[#111245] (issue: {es-issue}94745[#94745]) -// * Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) -// -// [[deprecation-9.0.0]] -// [float] -// === Deprecations -// -// Analysis:: -// * Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 {es-pull}113143[#113143] -// * deprecate `edge_ngram` side parameter {es-pull}110829[#110829] -// -// CRUD:: -// * Deprecate dot-prefixed indices and composable template index patterns {es-pull}112571[#112571] -// -// Machine Learning:: -// * [Inference API] Deprecate elser service {es-pull}113216[#113216] -// -// Search:: -// * Adding deprecation warnings for rrf using rank and `sub_searches` {es-pull}114854[#114854] -// * Deprecate legacy params from range query {es-pull}113286[#113286] -// -// [[enhancement-9.0.0]] -// [float] -// === Enhancements -// -// Aggregations:: -// * Account for `DelayedBucket` before reduction {es-pull}113013[#113013] -// * Add protection for OOM during aggregations partial reduction {es-pull}110520[#110520] -// * Deduplicate `BucketOrder` when deserializing {es-pull}112707[#112707] -// * Lower the memory footprint when creating `DelayedBucket` {es-pull}112519[#112519] -// * Reduce heap usage for `AggregatorsReducer` {es-pull}112874[#112874] -// * Remove reduce and `reduceContext` from `DelayedBucket` {es-pull}112547[#112547] -// -// Allocation:: -// * Add link to flood-stage watermark exception message {es-pull}111315[#111315] -// * Always allow rebalancing by default {es-pull}111015[#111015] -// * Only publish desired balance gauges on master {es-pull}115383[#115383] -// -// Application:: -// * [Profiling] add `container.id` field to event index template {es-pull}111969[#111969] -// -// Authorization:: -// * Add manage roles privilege {es-pull}110633[#110633] -// * Add privileges required for CDR misconfiguration features to work on AWS SecurityHub integration {es-pull}112574[#112574] -// * [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] -// -// CRUD:: -// * Suppress merge-on-recovery for older indices {es-pull}113462[#113462] -// -// Codec:: -// * Remove zstd feature flag for index codec best compression {es-pull}112665[#112665] -// -// Data streams:: -// * Add 'verbose' flag retrieving `maximum_timestamp` for get data stream API {es-pull}112303[#112303] -// * Display effective retention in the relevant data stream APIs {es-pull}112019[#112019] -// * Expose global retention settings via data stream lifecycle API {es-pull}112210[#112210] -// * Make ecs@mappings work with 
OTel attributes {es-pull}111600[#111600] -// -// Distributed:: -// * Add link to Max Shards Per Node exception message {es-pull}110993[#110993] -// * Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] -// -// EQL:: -// * ESQL: Delay construction of warnings {es-pull}114368[#114368] -// -// ES|QL:: -// * Add EXP ES|QL function {es-pull}110879[#110879] -// * Add `CircuitBreaker` to TDigest, Step 3: Connect with ESQL CB {es-pull}113387[#113387] -// * Add `CircuitBreaker` to TDigest, Step 4: Take into account shallow classes size {es-pull}113613[#113613] (issue: {es-issue}113916[#113916]) -// * Collect and display execution metadata for ES|QL cross cluster searches {es-pull}112595[#112595] (issue: {es-issue}112402[#112402]) -// * ESQL: Add support for multivalue fields in Arrow output {es-pull}114774[#114774] -// * ESQL: BUCKET: allow numerical spans as whole numbers {es-pull}111874[#111874] (issues: {es-issue}104646[#104646], {es-issue}109340[#109340], {es-issue}105375[#105375]) -// * ESQL: Have BUCKET generate friendlier intervals {es-pull}111879[#111879] (issue: {es-issue}110916[#110916]) -// * ESQL: Profile more timing information {es-pull}111855[#111855] -// * ESQL: Push down filters even in case of renames in Evals {es-pull}114411[#114411] -// * ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] -// * ESQL: Speed up CASE for some parameters {es-pull}112295[#112295] -// * ESQL: Speed up grouping by bytes {es-pull}114021[#114021] -// * ESQL: Support INLINESTATS grouped on expressions {es-pull}111690[#111690] -// * ESQL: Use less memory in listener {es-pull}114358[#114358] -// * ES|QL: Add support for cached strings in plan serialization {es-pull}112929[#112929] -// * ES|QL: add Telemetry API and track top functions {es-pull}111226[#111226] -// * ES|QL: add metrics for functions {es-pull}114620[#114620] -// * Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function {es-pull}112938[#112938] (issue: {es-issue}109973[#109973]) -// * Siem ea 9521 improve test {es-pull}111552[#111552] -// * Support multi-valued fields in compute engine for ST_DISTANCE {es-pull}114836[#114836] (issue: {es-issue}112910[#112910]) -// * [ESQL] Add `SPACE` function {es-pull}112350[#112350] -// * [ESQL] Add finish() elapsed time to aggregation profiling times {es-pull}113172[#113172] (issue: {es-issue}112950[#112950]) -// * [ESQL] Make query wrapped by `SingleValueQuery` cacheable {es-pull}110116[#110116] -// * [ES|QL] Add hypot function {es-pull}114382[#114382] -// * [ES|QL] Cast mixed numeric types to a common numeric type for Coalesce and In at Analyzer {es-pull}111917[#111917] (issue: {es-issue}111486[#111486]) -// * [ES|QL] Combine Disjunctive CIDRMatch {es-pull}111501[#111501] (issue: {es-issue}105143[#105143]) -// * [ES|QL] Create `Range` in `PushFiltersToSource` for qualified pushable filters on the same field {es-pull}111437[#111437] -// * [ES|QL] Name parameter with leading underscore {es-pull}111950[#111950] (issue: {es-issue}111821[#111821]) -// * [ES|QL] Named parameter for field names and field name patterns {es-pull}112905[#112905] -// * [ES|QL] Validate index name in parser {es-pull}112081[#112081] -// * [ES|QL] add reverse function {es-pull}113297[#113297] -// * [ES|QL] explicit cast a string literal to `date_period` and `time_duration` in arithmetic operations {es-pull}109193[#109193] -// -// Experiences:: -// * Integrate IBM watsonx to Inference API for text embeddings {es-pull}111770[#111770] -// -// Geo:: -// * Add support 
for spatial relationships in point field mapper {es-pull}112126[#112126] -// * Small performance improvement in h3 library {es-pull}113385[#113385] -// * Support docvalues only query in shape field {es-pull}112199[#112199] -// -// Health:: -// * (API) Cluster Health report `unassigned_primary_shards` {es-pull}112024[#112024] -// * Do not treat replica as unassigned if primary recently created and unassigned time is below a threshold {es-pull}112066[#112066] -// * Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] -// -// ILM+SLM:: -// * ILM: Add `total_shards_per_node` setting to searchable snapshot {es-pull}112972[#112972] (issue: {es-issue}112261[#112261]) -// * PUT slm policy should only increase version if actually changed {es-pull}111079[#111079] -// * Preserve Step Info Across ILM Auto Retries {es-pull}113187[#113187] -// * Register SLM run before snapshotting to save stats {es-pull}110216[#110216] -// * SLM interval schedule followup - add back `getFieldName` style getters {es-pull}112123[#112123] -// -// Infra/Circuit Breakers:: -// * Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] -// -// Infra/Core:: -// * Add nanos support to `ZonedDateTime` serialization {es-pull}111689[#111689] (issue: {es-issue}68292[#68292]) -// * Extend logging for dropped warning headers {es-pull}111624[#111624] (issue: {es-issue}90527[#90527]) -// * Give the kibana system user permission to read security entities {es-pull}114363[#114363] -// -// Infra/Metrics:: -// * Add `TaskManager` to `pluginServices` {es-pull}112687[#112687] -// * Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] -// -// Infra/REST API:: -// * Optimize the loop processing of URL decoding {es-pull}110237[#110237] (issue: {es-issue}110235[#110235]) -// -// Infra/Scripting:: -// * Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] -// * Expose `HexFormat` in Painless {es-pull}112412[#112412] -// -// Infra/Settings:: -// * Improve exception message for bad environment variable placeholders in settings {es-pull}114552[#114552] (issue: {es-issue}110858[#110858]) -// * Reprocess operator file settings when settings service starts, due to node restart or master node change {es-pull}114295[#114295] -// -// Ingest Node:: -// * Add `size_in_bytes` to enrich cache stats {es-pull}110578[#110578] -// * Add support for templates when validating mappings in the simulate ingest API {es-pull}111161[#111161] -// * Adding `index_template_substitutions` to the simulate ingest API {es-pull}114128[#114128] -// * Adding component template substitutions to the simulate ingest API {es-pull}113276[#113276] -// * Adding mapping validation to the simulate ingest API {es-pull}110606[#110606] -// * Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] -// * Adding support for simulate ingest mapping adddition for indices with mappings that do not come from templates {es-pull}115359[#115359] -// * Adds example plugin for custom ingest processor {es-pull}112282[#112282] (issue: {es-issue}111539[#111539]) -// * Fix unnecessary mustache template evaluation {es-pull}110986[#110986] (issue: {es-issue}110191[#110191]) -// * Listing all available databases in the _ingest/geoip/database API {es-pull}113498[#113498] -// * Make enrich cache based on memory usage {es-pull}111412[#111412] (issue: {es-issue}106081[#106081]) -// * Tag redacted document in ingest 
metadata {es-pull}113552[#113552] -// * Verify Maxmind database types in the geoip processor {es-pull}114527[#114527] -// -// Logs:: -// * Add validation for synthetic source mode in logs mode indices {es-pull}110677[#110677] -// * Store original source for keywords using a normalizer {es-pull}112151[#112151] -// -// Machine Learning:: -// * Add Completion Inference API for Alibaba Cloud AI Search Model {es-pull}112512[#112512] -// * Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] -// * Add Streaming Inference spec {es-pull}113812[#113812] -// * Add chunking settings configuration to `CohereService,` `AmazonBedrockService,` and `AzureOpenAiService` {es-pull}113897[#113897] -// * Add chunking settings configuration to `ElasticsearchService/ELSER` {es-pull}114429[#114429] -// * Add custom rule parameters to force time shift {es-pull}110974[#110974] -// * Adding chunking settings to `GoogleVertexAiService,` `AzureAiStudioService,` and `AlibabaCloudSearchService` {es-pull}113981[#113981] -// * Adding chunking settings to `MistralService,` `GoogleAiStudioService,` and `HuggingFaceService` {es-pull}113623[#113623] -// * Adds a new Inference API for streaming responses back to the user. {es-pull}113158[#113158] -// * Create `StreamingHttpResultPublisher` {es-pull}112026[#112026] -// * Create an ml node inference endpoint referencing an existing model {es-pull}114750[#114750] -// * Default inference endpoint for ELSER {es-pull}113873[#113873] -// * Default inference endpoint for the multilingual-e5-small model {es-pull}114683[#114683] -// * Enable OpenAI Streaming {es-pull}113911[#113911] -// * Filter empty task settings objects from the API response {es-pull}114389[#114389] -// * Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] -// * Migrate Inference to `ChunkedToXContent` {es-pull}111655[#111655] -// * Register Task while Streaming {es-pull}112369[#112369] -// * Server-Sent Events for Inference response {es-pull}112565[#112565] -// * Stream Anthropic Completion {es-pull}114321[#114321] -// * Stream Azure Completion {es-pull}114464[#114464] -// * Stream Bedrock Completion {es-pull}114732[#114732] -// * Stream Cohere Completion {es-pull}114080[#114080] -// * Stream Google Completion {es-pull}114596[#114596] -// * Stream OpenAI Completion {es-pull}112677[#112677] -// * Support sparse embedding models in the elasticsearch inference service {es-pull}112270[#112270] -// * Switch default chunking strategy to sentence {es-pull}114453[#114453] -// * Upgrade to AWS SDK v2 {es-pull}114309[#114309] (issue: {es-issue}110590[#110590]) -// * Use the same chunking configurations for models in the Elasticsearch service {es-pull}111336[#111336] -// * Validate streaming HTTP Response {es-pull}112481[#112481] -// * Wait for allocation on scale up {es-pull}114719[#114719] -// * [Inference API] Add Alibaba Cloud AI Search Model support to Inference API {es-pull}111181[#111181] -// * [Inference API] Add Docs for AlibabaCloud AI Search Support for the Inference API {es-pull}111181[#111181] -// * [Inference API] Introduce Update API to change some aspects of existing inference endpoints {es-pull}114457[#114457] -// * [Inference API] Prevent inference endpoints from being deleted if they are referenced by semantic text {es-pull}110399[#110399] -// * [Inference API] alibabacloud ai search service support chunk infer to support semantic_text field {es-pull}110399[#110399] -// -// Mapping:: -// * Add Field caps support for Semantic Text 
{es-pull}111809[#111809] -// * Add Lucene segment-level fields stats {es-pull}111123[#111123] -// * Add Search Inference ID To Semantic Text Mapping {es-pull}113051[#113051] -// * Add object param for keeping synthetic source {es-pull}113690[#113690] -// * Add support for multi-value dimensions {es-pull}112645[#112645] (issue: {es-issue}110387[#110387]) -// * Allow dimension fields to have multiple values in standard and logsdb index mode {es-pull}112345[#112345] (issues: {es-issue}112232[#112232], {es-issue}112239[#112239]) -// * Allow fields with dots in sparse vector field mapper {es-pull}111981[#111981] (issue: {es-issue}109118[#109118]) -// * Allow querying `index_mode` {es-pull}110676[#110676] -// * Configure keeping source in `FieldMapper` {es-pull}112706[#112706] -// * Control storing array source with index setting {es-pull}112397[#112397] -// * Introduce mode `subobjects=auto` for objects {es-pull}110524[#110524] -// * Update `semantic_text` field to support indexing numeric and boolean data types {es-pull}111284[#111284] -// * Use ELSER By Default For Semantic Text {es-pull}113563[#113563] -// * Use fallback synthetic source for `copy_to` and doc_values: false cases {es-pull}112294[#112294] (issues: {es-issue}110753[#110753], {es-issue}110038[#110038], {es-issue}109546[#109546]) -// -// Network:: -// * Add links to network disconnect troubleshooting {es-pull}112330[#112330] -// -// Ranking:: -// * Add timeout and cancellation check to rescore phase {es-pull}115048[#115048] -// -// Recovery:: -// * Trigger merges after recovery {es-pull}113102[#113102] -// -// Relevance:: -// * Add a query rules tester API call {es-pull}114168[#114168] -// -// Search:: -// * Add initial support for `semantic_text` field type {es-pull}113920[#113920] -// * Add more `dense_vector` details for cluster stats field stats {es-pull}113607[#113607] -// * Add range and regexp Intervals {es-pull}111465[#111465] -// * Adding support for `allow_partial_search_results` in PIT {es-pull}111516[#111516] -// * Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin` {es-pull}112933[#112933] -// * Avoid using concurrent collector manager in `LuceneChangesSnapshot` {es-pull}113816[#113816] -// * Bool query early termination should also consider `must_not` clauses {es-pull}115031[#115031] -// * Deduplicate Kuromoji User Dictionary {es-pull}112768[#112768] -// * Multi term intervals: increase max_expansions {es-pull}112826[#112826] (issue: {es-issue}110491[#110491]) -// * Search coordinator uses `event.ingested` in cluster state to do rewrites {es-pull}111523[#111523] -// * Update cluster stats for retrievers {es-pull}114109[#114109] -// -// Security:: -// * (logger) change from error to warn for short circuiting user {es-pull}112895[#112895] -// * Add asset criticality indices for `kibana_system_user` {es-pull}113588[#113588] -// * Add tier preference to security index settings allowlist {es-pull}111818[#111818] -// * [Service Account] Add `AutoOps` account {es-pull}111316[#111316] -// -// Snapshot/Restore:: -// * Add `max_multipart_parts` setting to S3 repository {es-pull}113989[#113989] -// * Add support for Azure Managed Identity {es-pull}111344[#111344] -// * Add telemetry for repository usage {es-pull}112133[#112133] -// * Add workaround for missing shard gen blob {es-pull}112337[#112337] -// * Clean up dangling S3 multipart uploads {es-pull}111955[#111955] (issues: {es-issue}101169[#101169], {es-issue}44971[#44971]) -// * Execute shard snapshot tasks in shard-id order {es-pull}111576[#111576] 
(issue: {es-issue}108739[#108739]) -// * Include account name in Azure settings exceptions {es-pull}111274[#111274] -// * Introduce repository integrity verification API {es-pull}112348[#112348] (issue: {es-issue}52622[#52622]) -// * Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] -// * Track shard snapshot progress during node shutdown {es-pull}112567[#112567] -// -// Stats:: -// * Track search and fetch failure stats {es-pull}113988[#113988] -// -// TSDB:: -// * Add support for boolean dimensions {es-pull}111457[#111457] (issue: {es-issue}111338[#111338]) -// * Stop iterating over all fields to extract @timestamp value {es-pull}110603[#110603] (issue: {es-issue}92297[#92297]) -// * Support booleans in routing path {es-pull}111445[#111445] -// -// Vector Search:: -// * Dense vector field types updatable for int4 {es-pull}110928[#110928] -// * Use native scalar scorer for int8_flat index {es-pull}111071[#111071] -// -// [[feature-9.0.0]] -// [float] -// === New features -// -// Data streams:: -// * Introduce global retention in data stream lifecycle. {es-pull}111972[#111972] -// * X-pack/plugin/otel: introduce x-pack-otel plugin {es-pull}111091[#111091] -// -// ES|QL:: -// * Add ESQL match function {es-pull}113374[#113374] -// * ESQL: Add `MV_PSERIES_WEIGHTED_SUM` for score calculations used by security solution {es-pull}109017[#109017] -// * ESQL: Add async ID and `is_running` headers to ESQL async query {es-pull}111840[#111840] -// * ESQL: Add boolean support to Max and Min aggs {es-pull}110527[#110527] -// * ESQL: Add boolean support to TOP aggregation {es-pull}110718[#110718] -// * ESQL: Added `mv_percentile` function {es-pull}111749[#111749] (issue: {es-issue}111591[#111591]) -// * ESQL: INLINESTATS {es-pull}109583[#109583] (issue: {es-issue}107589[#107589]) -// * ESQL: Introduce per agg filter {es-pull}113735[#113735] -// * ESQL: Strings support for MAX and MIN aggregations {es-pull}111544[#111544] -// * ESQL: Support IP fields in MAX and MIN aggregations {es-pull}110921[#110921] -// * ESQL: TOP aggregation IP support {es-pull}111105[#111105] -// * ESQL: TOP support for strings {es-pull}113183[#113183] (issue: {es-issue}109849[#109849]) -// * ESQL: `mv_median_absolute_deviation` function {es-pull}112055[#112055] (issue: {es-issue}111590[#111590]) -// * Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] -// * Search in ES|QL: Add MATCH operator {es-pull}110971[#110971] -// -// ILM+SLM:: -// * SLM Interval based scheduling {es-pull}110847[#110847] -// -// Inference:: -// * EIS integration {es-pull}111154[#111154] -// -// Ingest Node:: -// * Add a `terminate` ingest processor {es-pull}114157[#114157] (issue: {es-issue}110218[#110218]) -// -// Machine Learning:: -// * Inference autoscaling {es-pull}109667[#109667] -// * Telemetry for inference adaptive allocations {es-pull}110630[#110630] -// -// Relevance:: -// * [Query rules] Add `exclude` query rule type {es-pull}111420[#111420] -// -// Search:: -// * Async search: Add ID and "is running" http headers {es-pull}112431[#112431] (issue: {es-issue}109576[#109576]) -// * Cross-cluster search telemetry {es-pull}113825[#113825] -// -// Vector Search:: -// * Adding new bbq index types behind a feature flag {es-pull}114439[#114439] + +Aggregations:: +* Handle with `illegalArgumentExceptions` negative values in HDR percentile aggregations {es-pull}116174[#116174] (issue: {es-issue}115777[#115777]) + +Analysis:: +* Adjust analyze limit exception to be a `bad_request` 
{es-pull}116325[#116325] + +CCS:: +* Fix long metric deserialize & add - auto-resize needs to be set manually {es-pull}117105[#117105] (issue: {es-issue}116914[#116914]) + +CRUD:: +* Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] +* Standardize error code when bulk body is invalid {es-pull}114869[#114869] + +Data streams:: +* Add missing header in `put_data_lifecycle` rest-api-spec {es-pull}116292[#116292] + +EQL:: +* Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) + +ES|QL:: +* Added stricter range type checks and runtime warnings for ENRICH {es-pull}115091[#115091] (issues: {es-issue}107357[#107357], {es-issue}116799[#116799]) +* Don't return TEXT type for functions that take TEXT {es-pull}114334[#114334] (issues: {es-issue}111537[#111537], {es-issue}114333[#114333]) +* ESQL: Fix sorts containing `_source` {es-pull}116980[#116980] (issue: {es-issue}116659[#116659]) +* ESQL: fix the column position in errors {es-pull}117153[#117153] +* ES|QL: Fix stats by constant expression {es-pull}114899[#114899] +* Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions {es-pull}116583[#116583] (issues: {es-issue}116529[#116529], {es-issue}116544[#116544]) +* Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) +* Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` {es-pull}114665[#114665] (issue: {es-issue}105095[#105095]) +* Use `SearchStats` instead of field.isAggregatable in data node planning {es-pull}115744[#115744] (issue: {es-issue}115737[#115737]) +* [ESQL] Fix Binary Comparisons on Date Nanos {es-pull}116346[#116346] +* [ES|QL] To_DatePeriod and To_TimeDuration return better error messages on `union_type` fields {es-pull}114934[#114934] + +Infra/CLI:: +* Fix NPE on plugin sync {es-pull}115640[#115640] (issue: {es-issue}114818[#114818]) + +Infra/Metrics:: +* Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] + +Infra/REST API:: +* Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. 
{es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) + +Infra/Settings:: +* Don't allow secure settings in YML config (109115) {es-pull}115779[#115779] (issue: {es-issue}109115[#109115]) + +Ingest Node:: +* Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) +* Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] + +Logs:: +* Always check if index mode is logsdb {es-pull}116922[#116922] +* Prohibit changes to index mode, source, and sort settings during resize {es-pull}115812[#115812] + +Machine Learning:: +* Fix bug in ML autoscaling when some node info is unavailable {es-pull}116650[#116650] +* Fix deberta tokenizer bug caused by bug in normalizer {es-pull}117189[#117189] +* Hides `hugging_face_elser` service from the `GET _inference/_services API` {es-pull}116664[#116664] (issue: {es-issue}116644[#116644]) +* Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) +* Propagate scoring function through random sampler {es-pull}116957[#116957] (issue: {es-issue}110134[#110134]) +* Update Deberta tokenizer {es-pull}116358[#116358] +* Wait for up to 2 seconds for yellow status before starting search {es-pull}115938[#115938] (issues: {es-issue}107777[#107777], {es-issue}105955[#105955], {es-issue}107815[#107815], {es-issue}112191[#112191]) + +Mapping:: +* Change synthetic source logic for `constant_keyword` {es-pull}117182[#117182] (issue: {es-issue}117083[#117083]) +* Ignore conflicting fields during dynamic mapping update {es-pull}114227[#114227] (issue: {es-issue}114228[#114228]) + +Network:: +* Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` {es-pull}116211[#116211] + +Ranking:: +* Propagating nested `inner_hits` to the parent compound retriever {es-pull}116408[#116408] (issue: {es-issue}116397[#116397]) + +Relevance:: +* Fix handling of bulk requests with semantic text fields and delete ops {es-pull}116942[#116942] + +Search:: +* Catch and handle disconnect exceptions in search {es-pull}115836[#115836] +* Fields caps does not honour ignore_unavailable {es-pull}116021[#116021] (issue: {es-issue}107767[#107767]) +* Fix handling of time exceeded exception in fetch phase {es-pull}116676[#116676] +* Fix leak in `DfsQueryPhase` and introduce search disconnect stress test {es-pull}116060[#116060] (issue: {es-issue}115056[#115056]) +* Inconsistency in the _analyzer api when the index is not included {es-pull}115930[#115930] +* Semantic text simple partial update {es-pull}116478[#116478] +* Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) +* Validate missing shards after the coordinator rewrite {es-pull}116382[#116382] +* _validate does not honour ignore_unavailable {es-pull}116656[#116656] (issue: {es-issue}116594[#116594]) + +Snapshot/Restore:: +* Retry throttled snapshot deletions {es-pull}113237[#113237] + +Vector Search:: +* Update Semantic Query To Handle Zero Size Responses {es-pull}116277[#116277] (issue: {es-issue}116083[#116083]) + +Watcher:: +* Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) + +[[deprecation-9.0.0]] +[float] +=== Deprecations + +Ingest Node:: +* Fix `_type` deprecation on simulate pipeline API {es-pull}116259[#116259] + +Machine Learning:: +* 
[Inference API] Deprecate elser service {es-pull}113216[#113216] + +Mapping:: +* Deprecate `_source.mode` in mappings {es-pull}116689[#116689] + +[[enhancement-9.0.0]] +[float] +=== Enhancements + +Allocation:: +* Only publish desired balance gauges on master {es-pull}115383[#115383] + +Authorization:: +* Add a `monitor_stats` privilege and allow that privilege for remote cluster privileges {es-pull}114964[#114964] +* [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] + +CRUD:: +* Suppress merge-on-recovery for older indices {es-pull}113462[#113462] + +Data streams:: +* Adding a deprecation info API warning for data streams with old indices {es-pull}116447[#116447] +* Apm-data: disable date_detection for all apm data streams {es-pull}116995[#116995] + +Distributed:: +* Metrics for incremental bulk splits {es-pull}116765[#116765] +* Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] + +ES|QL:: +* Add ES|QL `bit_length` function {es-pull}115792[#115792] +* ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time {es-pull}116348[#116348] (issue: {es-issue}114531[#114531]) +* ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] +* ESQL: extract common filter from aggs {es-pull}115678[#115678] +* ESQL: optimise aggregations filtered by false/null into evals {es-pull}115858[#115858] +* ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote clusters {es-pull}115266[#115266] (issue: {es-issue}114531[#114531]) +* ES|QL: add metrics for functions {es-pull}114620[#114620] +* Esql Enable Date Nanos (tech preview) {es-pull}117080[#117080] +* Support partial sort fields in TopN pushdown {es-pull}116043[#116043] (issue: {es-issue}114515[#114515]) +* [ES|QL] Implicit casting string literal to intervals {es-pull}115814[#115814] (issue: {es-issue}115352[#115352]) + +Health:: +* Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] + +Indices APIs:: +* Ensure class resource stream is closed in `ResourceUtils` {es-pull}116437[#116437] + +Inference:: +* Add version prefix to Inference Service API path {es-pull}117095[#117095] + +Infra/Circuit Breakers:: +* Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] + +Infra/Core:: +* Support for unsigned 64 bit numbers in Cpu stats {es-pull}114681[#114681] (issue: {es-issue}112274[#112274]) + +Infra/Metrics:: +* Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] + +Infra/Scripting:: +* Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] + +Ingest Node:: +* Add postal_code support to the City and Enterprise databases {es-pull}114193[#114193] +* Add support for registered country fields for maxmind geoip databases {es-pull}114521[#114521] +* Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] +* Adding support for simulate ingest mapping addition for indices with mappings that do not come from templates {es-pull}115359[#115359] +* Support IPinfo database configurations {es-pull}114548[#114548] +* Support more maxmind fields in the geoip processor {es-pull}114268[#114268] + +Logs:: +* Add logsdb telemetry {es-pull}115994[#115994] +* Add num docs and size to logsdb telemetry {es-pull}116128[#116128] +* Feature: re-structure document ID generation favoring _id inverted index compression
{es-pull}104683[#104683] + +Machine Learning:: +* Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] +* Add special case for elastic reranker in inference API {es-pull}116962[#116962] +* Adding inference endpoint validation for `AzureAiStudioService` {es-pull}113713[#113713] +* Adds support for `input_type` field to Vertex inference service {es-pull}116431[#116431] +* Enable built-in Inference Endpoints and default for Semantic Text {es-pull}116931[#116931] +* Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] +* Inference duration and error metrics {es-pull}115876[#115876] +* Remove all mentions of eis and gateway and deprecate flags that do {es-pull}116692[#116692] +* [Inference API] Add API to get configuration of inference services {es-pull}114862[#114862] +* [Inference API] Improve chunked results error message {es-pull}115807[#115807] + +Network:: +* Allow http unsafe buffers by default {es-pull}116115[#116115] + +Recovery:: +* Attempt to clean up index before remote transfer {es-pull}115142[#115142] (issue: {es-issue}104473[#104473]) +* Trigger merges after recovery {es-pull}113102[#113102] + +Reindex:: +* Change Reindexing metrics unit from millis to seconds {es-pull}115721[#115721] + +Relevance:: +* Add query rules retriever {es-pull}114855[#114855] +* Add tracking for query rule types {es-pull}116357[#116357] + +Search:: +* Add Search Phase APM metrics {es-pull}113194[#113194] +* Add `docvalue_fields` Support for `dense_vector` Fields {es-pull}114484[#114484] (issue: {es-issue}108470[#108470]) +* Add initial support for `semantic_text` field type {es-pull}113920[#113920] +* Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter {es-pull}115459[#115459] (issue: {es-issue}97849[#97849]) +* Better sizing `BytesRef` for Strings in Queries {es-pull}115655[#115655] +* Enable `_tier` based coordinator rewrites for all indices (not just mounted indices) {es-pull}115797[#115797] +* Only aggregations require at least one shard request {es-pull}115314[#115314] + +Security:: +* Add refresh `.security` index call between security migrations {es-pull}114879[#114879] + +Snapshot/Restore:: +* Improve message about insecure S3 settings {es-pull}116915[#116915] +* Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] +* Split searchable snapshot into multiple repo operations {es-pull}116918[#116918] +* Track shard snapshot progress during node shutdown {es-pull}112567[#112567] + +Vector Search:: +* Add support for bitwise inner-product in painless {es-pull}116082[#116082] + +[[feature-9.0.0]] +[float] +=== New features + +Data streams:: +* Add default ILM policies and switch to ILM for apm-data plugin {es-pull}115687[#115687] + +ES|QL:: +* Add support for `BYTE_LENGTH` scalar function {es-pull}116591[#116591] +* Esql/lookup join grammar {es-pull}116515[#116515] +* Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] + +Search:: +* ESQL - Add match operator (:) {es-pull}116819[#116819] [[upgrade-9.0.0]] [float] === Upgrades -// -// Infra/Core:: -// * Upgrade xcontent to Jackson 2.17.0 {es-pull}111948[#111948] -// * Upgrade xcontent to Jackson 2.17.2 {es-pull}112320[#112320] -// -// Infra/Metrics:: -// * Update APM Java Agent to support JDK 23 {es-pull}115194[#115194] (issues: {es-issue}115101[#115101], {es-issue}115100[#115100]) -// -// Search:: -// * Upgrade to Lucene 10 {es-pull}114741[#114741] -// * Upgrade to Lucene 9.12 
{es-pull}113333[#113333] -// -// Snapshot/Restore:: -// * Upgrade Azure SDK {es-pull}111225[#111225] -// * Upgrade `repository-azure` dependencies {es-pull}112277[#112277] + +Search:: +* Upgrade to Lucene 10 {es-pull}114741[#114741] diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index edecd4f727583..b87081639c684 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-highlights]] == What's new in {minor-version} @@ -12,163 +9,14 @@ For detailed information about this release, see the <> and <>. endif::[] -// -// // tag::notable-highlights[] -// -// [discrete] -// [[esql_inlinestats]] -// === ESQL: INLINESTATS -// This adds the `INLINESTATS` command to ESQL which performs a STATS and -// then enriches the results into the output stream. So, this query: -// -// [source,esql] -// ---- -// FROM test -// | INLINESTATS m=MAX(a * b) BY b -// | WHERE m == a * b -// | SORT a DESC, b DESC -// | LIMIT 3 -// ---- -// -// Produces output like: -// -// | a | b | m | -// | --- | --- | ----- | -// | 99 | 999 | 98901 | -// | 99 | 998 | 98802 | -// | 99 | 997 | 98703 | -// -// {es-pull}109583[#109583] -// -// [discrete] -// [[always_allow_rebalancing_by_default]] -// === Always allow rebalancing by default -// In earlier versions of {es} the `cluster.routing.allocation.allow_rebalance` setting defaults to -// `indices_all_active` which blocks all rebalancing moves while the cluster is in `yellow` or `red` health. This was -// appropriate for the legacy allocator which might do too many rebalancing moves otherwise. Today's allocator has -// better support for rebalancing a cluster that is not in `green` health, and expects to be able to rebalance some -// shards away from over-full nodes to avoid allocating shards to undesirable locations in the first place. From -// version 8.16 `allow_rebalance` setting defaults to `always` unless the legacy allocator is explicitly enabled. -// -// {es-pull}111015[#111015] -// -// [discrete] -// [[add_global_retention_in_data_stream_lifecycle]] -// === Add global retention in data stream lifecycle -// Data stream lifecycle now supports configuring retention on a cluster level, -// namely global retention. Global retention \nallows us to configure two different -// retentions: -// -// - `data_streams.lifecycle.retention.default` is applied to all data streams managed -// by the data stream lifecycle that do not have retention defined on the data stream level. -// - `data_streams.lifecycle.retention.max` is applied to all data streams managed by the -// data stream lifecycle and it allows any data stream \ndata to be deleted after the `max_retention` has passed. -// -// {es-pull}111972[#111972] -// -// [discrete] -// [[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]] -// === Enable ZStandard compression for indices with index.codec set to best_compression -// Before DEFLATE compression was used to compress stored fields in indices with index.codec index setting set to -// best_compression, with this change ZStandard is used as compression algorithm to stored fields for indices with -// index.codec index setting set to best_compression. 
The usage ZStandard results in less storage usage with a -// similar indexing throughput depending on what options are used. Experiments with indexing logs have shown that -// ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput compared to DEFLATE. -// -// {es-pull}112665[#112665] -// -// [discrete] -// [[esql_introduce_per_agg_filter]] -// === ESQL: Introduce per agg filter -// Add support for aggregation scoped filters that work dynamically on the -// data in each group. -// -// [source,esql] -// ---- -// | STATS success = COUNT(*) WHERE 200 <= code AND code < 300, -// redirect = COUNT(*) WHERE 300 <= code AND code < 400, -// client_err = COUNT(*) WHERE 400 <= code AND code < 500, -// server_err = COUNT(*) WHERE 500 <= code AND code < 600, -// total_count = COUNT(*) -// ---- -// -// Implementation wise, the base AggregateFunction has been extended to -// allow a filter to be passed on. This is required to incorporate the -// filter as part of the aggregate equality/identity which would fail with -// the filter as an external component. -// As part of the process, the serialization for the existing aggregations -// had to be fixed so AggregateFunction implementations so that it -// delegates to their parent first. -// -// {es-pull}113735[#113735] -// -// // end::notable-highlights[] -// -// -// [discrete] -// [[esql_multi_value_fields_supported_in_geospatial_predicates]] -// === ESQL: Multi-value fields supported in Geospatial predicates -// Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY` -// of the values in the field should pass the predicate. -// For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not? -// This ambiguity does not exist with the spatial predicates -// `ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL` -// is implied by the predicate itself. -// Consider a predicate checking a field named `location` against a test geometry named `shape`: -// -// * `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape -// * `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape -// -// This works even if the shape argument is itself a complex or compound geometry. -// -// Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved -// with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained -// geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value -// field is first combined into a single geometry before performing the predicate check. -// -// * `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape -// * `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape -// -// {es-pull}112063[#112063] -// -// [discrete] -// [[enhance_sort_push_down_to_lucene_to_cover_references_to_fields_st_distance_function]] -// === Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function -// The most used and likely most valuable geospatial search query in Elasticsearch is the sorted proximity search, -// finding items within a certain distance of a point of interest and sorting the results by distance. -// This has been possible in ES|QL since 8.15.0, but the sorting was done in-memory, not pushed down to Lucene. 
-// Now the sorting is pushed down to Lucene, which results in a significant performance improvement. -// -// Queries that perform both filtering and sorting on distance are supported. For example: -// -// [source,esql] -// ---- -// FROM test -// | EVAL distance = ST_DISTANCE(location, TO_GEOPOINT("POINT(37.7749, -122.4194)")) -// | WHERE distance < 1000000 -// | SORT distance ASC, name DESC -// | LIMIT 10 -// ---- -// -// In addition, the support for sorting on EVAL expressions has been extended to cover references to fields: -// -// [source,esql] -// ---- -// FROM test -// | EVAL ref = field -// | SORT ref ASC -// | LIMIT 10 -// ---- -// -// {es-pull}112938[#112938] -// + +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: +// tag::notable-highlights[] // [discrete] -// [[cross_cluster_search_telemetry]] -// === Cross-cluster search telemetry -// The cross-cluster search telemetry is collected when cross-cluster searches -// are performed, and is returned as "ccs" field in `_cluster/stats` output. -// It also add a new parameter `include_remotes=true` to the `_cluster/stats` API -// which will collect data from connected remote clusters. +// === Heading // -// {es-pull}113825[#113825] +// Description. +// end::notable-highlights[] + diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 0da75ac30d2dd..86a81f1d155d2 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -704,5 +704,3 @@ Instead they are only allowed as elements of specific retrievers: * <> * <> * <> -* <> - diff --git a/docs/reference/search/search-your-data/highlighting.asciidoc b/docs/reference/search/search-your-data/highlighting.asciidoc index 7ee13d971b035..6a432e6104524 100644 --- a/docs/reference/search/search-your-data/highlighting.asciidoc +++ b/docs/reference/search/search-your-data/highlighting.asciidoc @@ -176,8 +176,6 @@ fragmenter:: Specifies how text should be broken up in highlight snippets: `simple` or `span`. Only valid for the `plain` highlighter. Defaults to `span`. -force_source:: deprecated; this parameter has no effect - `simple`::: Breaks up text into same-sized fragments. `span`::: Breaks up text into same-sized fragments, but tries to avoid breaking up text between highlighted terms. This is helpful when you're diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index 82541412db4bd..9ef1ae0ebc59b 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,12 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. 
include::search-api.asciidoc[] +include::../../how-to/recipes.asciidoc[] +// ☝️ search relevance recipes include::retrievers-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] include::search-application-overview.asciidoc[] -include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] +include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] \ No newline at end of file diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index 6db08b307f193..d730587e7db17 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -14,11 +14,6 @@ roles have a fixed set of privileges and cannot be updated. Grants access necessary for the APM system user to send system-level data (such as monitoring) to {es}. -[[built-in-roles-apm-user]] `apm_user` :: -Grants the privileges required for APM users (such as `read` and -`view_index_metadata` privileges on the `apm-*` and `.ml-anomalies*` indices). -deprecated:[7.13.0,"See {kibana-ref}/apm-app-users.html[APM app users and privileges\] for alternatives."]. - [[built-in-roles-beats-admin]] `beats_admin` :: Grants access to the `.management-beats` index, which contains configuration information for the Beats. diff --git a/docs/reference/security/fips-140-compliance.asciidoc b/docs/reference/security/fips-140-compliance.asciidoc index 5bf73d43541d6..dec17927e62b8 100644 --- a/docs/reference/security/fips-140-compliance.asciidoc +++ b/docs/reference/security/fips-140-compliance.asciidoc @@ -53,8 +53,8 @@ https://docs.oracle.com/en/java/javase/17/security/java-cryptography-architectur https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html[JSSE] implementation is required so that the JVM uses FIPS validated implementations of NIST recommended cryptographic algorithms. -Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.4/bc-fips-1.0.2.4.jar[bc-fips 1.0.2.4] -and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.17/bctls-fips-1.0.17.jar[bctls-fips 1.0.17]. +Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.5/bc-fips-1.0.2.5.jar[bc-fips 1.0.2.5] +and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.19/bctls-fips-1.0.19.jar[bctls-fips 1.0.19]. Please refer to the {es} https://www.elastic.co/support/matrix#matrix_jvm[JVM support matrix] for details on which combinations of JVM and security provider are supported in FIPS mode. Elasticsearch does not ship with a FIPS certified provider. It is the responsibility of the user to install and configure the security provider to ensure compliance with FIPS 140-2. 
Using a FIPS certified provider will ensure that only diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 36f311b1cdd97..1b08a802a444f 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -6,6 +6,9 @@ You can use AWS S3 as a repository for {ref}/snapshot-restore.html[Snapshot/Rest *If you are looking for a hosted solution of Elasticsearch on AWS, please visit https://www.elastic.co/cloud/.* + +See https://www.youtube.com/watch?v=ACqfyzWf-xs[this video] +for a walkthrough of connecting an AWS S3 repository. + [[repository-s3-usage]] ==== Getting started diff --git a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc index fe9422d6d4c53..e1ceefb92bbec 100644 --- a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc +++ b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc @@ -8,5 +8,6 @@ In order to diagnose the unassigned shards in your deployment use the following include::{es-ref-dir}/tab-widgets/troubleshooting/data/diagnose-unassigned-shards-widget.asciidoc[] - +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index eb56a37562c31..4289242deb486 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -19,6 +19,8 @@ operate to have a green health status. In many cases, your cluster will recover to green health status automatically. If the cluster doesn't automatically recover, then you must <> the remaining problems so management and cleanup activities can proceed. +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. [discrete] [[diagnose-cluster-status]] @@ -90,6 +92,8 @@ PUT _cluster/settings } ---- +See https://www.youtube.com/watch?v=MiKKUdZvwnI[this video] for a walkthrough of troubleshooting "no allocations are allowed". + [discrete] [[fix-cluster-status-recover-nodes]] ===== Recover lost nodes @@ -262,3 +266,5 @@ POST _cluster/reroute ---- // TEST[s/^/PUT my-index\n/] // TEST[catch:bad_request] + +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. \ No newline at end of file diff --git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc index a944ca88d285d..c6d46b9e94fc8 100644 --- a/docs/reference/troubleshooting/diagnostic.asciidoc +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -13,6 +13,8 @@ This information can be used to troubleshoot problems with your cluster. For exa You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. +See https://www.youtube.com/watch?v=Bb6SaqhqYHw[this video] for a walkthrough of capturing an {es} diagnostic.
+ [discrete] [[diagnostic-tool-requirements]] === Requirements diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index e3148c6f3ef2e..05fda8e0244de 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -1,5 +1,5 @@ [versions] -asm = "9.6" +asm = "9.7.1" jackson = "2.15.0" junit5 = "5.8.1" spock = "2.1-groovy-3.0" @@ -16,7 +16,7 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" -forbiddenApis = "de.thetaphi:forbiddenapis:3.6" +forbiddenApis = "de.thetaphi:forbiddenapis:3.8" gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.18.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5e874b52fc4c6..37178fd9439d0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/libs/core/build.gradle b/libs/core/build.gradle index e24417e09a53d..99c22620e7354 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -8,7 +8,6 @@ */ apply plugin: 'elasticsearch.publish' -apply plugin: 'elasticsearch.mrjar' dependencies { // This dependency is used only by :libs:core for null-checking interop with other tools diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index f5fe8d41c2243..a3bbb611f3e68 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,19 +9,29 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import
org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; +import java.io.IOException; import java.lang.reflect.Method; -import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { + @Override - public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { + public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { return new InstrumenterImpl(classNameSuffix, instrumentationMethods); } @@ -33,9 +43,97 @@ public MethodKey methodKeyForTarget(Method targetMethod) { return new MethodKey( Type.getInternalName(targetMethod.getDeclaringClass()), targetMethod.getName(), - Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList(), - Modifier.isStatic(targetMethod.getModifiers()) + Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList() ); } + @Override + public Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, + IOException { + var methodsToInstrument = new HashMap(); + var checkerClass = Class.forName(entitlementCheckerClassName); + var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor visitMethod( + int access, + String checkerMethodName, + String checkerMethodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); + + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); + + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkerMethod = new CheckerMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); + + methodsToInstrument.put(methodToInstrument, checkerMethod); + + return mv; + } + }; + reader.accept(visitor, 0); + return methodsToInstrument; + } + + private static final Type CLASS_TYPE = Type.getType(Class.class); + + static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] checkerMethodArgumentTypes) { + var classNameStartIndex = checkerMethodName.indexOf('$'); + var classNameEndIndex = checkerMethodName.lastIndexOf('$'); + + if (classNameStartIndex == -1 || classNameStartIndex >= classNameEndIndex) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect name format. 
" + + "It should be either check$$methodName (instance) or check$package_ClassName$methodName (static)", + checkerMethodName + ) + ); + } + + // No "className" (check$$methodName) -> method is static, and we'll get the class from the actual typed argument + final boolean targetMethodIsStatic = classNameStartIndex + 1 != classNameEndIndex; + final String targetMethodName = checkerMethodName.substring(classNameEndIndex + 1); + + final String targetClassName; + final List targetParameterTypes; + if (targetMethodIsStatic) { + if (checkerMethodArgumentTypes.length < 1 || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. " + "It must have a first argument of Class type.", + checkerMethodName + ) + ); + } + + targetClassName = checkerMethodName.substring(classNameStartIndex + 1, classNameEndIndex).replace('_', '/'); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(1).map(Type::getInternalName).toList(); + } else { + if (checkerMethodArgumentTypes.length < 2 + || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false + || checkerMethodArgumentTypes[1].getSort() != Type.OBJECT) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. " + + "It must have a first argument of Class type, and a second argument of the class containing the method to " + + "instrument", + checkerMethodName + ) + ); + } + var targetClassType = checkerMethodArgumentTypes[1]; + targetClassName = targetClassType.getInternalName(); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(2).map(Type::getInternalName).toList(); + } + return new MethodKey(targetClassName, targetMethodName, targetParameterTypes); + } } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 63c9ccd80be70..dc20b16400f3d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.objectweb.asm.AnnotationVisitor; @@ -23,7 +24,6 @@ import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Method; import java.util.Map; import java.util.stream.Stream; @@ -36,13 +36,29 @@ import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL; public class InstrumenterImpl implements Instrumenter { + + private static final String checkerClassDescriptor; + private static final String handleClass; + static { + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + String checkerClass = "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker"; + handleClass = checkerClass + "Handle"; + checkerClassDescriptor = Type.getObjectType(checkerClass).getDescriptor(); + } + /** * To avoid class name collisions 
during testing without an agent to replace classes in-place. */ private final String classNameSuffix; - private final Map instrumentationMethods; + private final Map instrumentationMethods; - public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { + public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { this.classNameSuffix = classNameSuffix; this.instrumentationMethods = instrumentationMethods; } @@ -138,12 +154,7 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str var mv = super.visitMethod(access, name, descriptor, signature, exceptions); if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; - var key = new MethodKey( - className, - name, - Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), - isStatic - ); + var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = instrumentationMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); @@ -177,7 +188,7 @@ private void addClassAnnotationIfNeeded() { class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final String instrumentedMethodDescriptor; - private final Method instrumentationMethod; + private final CheckerMethod instrumentationMethod; private boolean hasCallerSensitiveAnnotation = false; EntitlementMethodVisitor( @@ -185,7 +196,7 @@ class EntitlementMethodVisitor extends MethodVisitor { MethodVisitor methodVisitor, boolean instrumentedMethodIsStatic, String instrumentedMethodDescriptor, - Method instrumentationMethod + CheckerMethod instrumentationMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; @@ -262,22 +273,19 @@ private void forwardIncomingArguments() { private void invokeInstrumentationMethod() { mv.visitMethodInsn( INVOKEINTERFACE, - Type.getInternalName(instrumentationMethod.getDeclaringClass()), - instrumentationMethod.getName(), - Type.getMethodDescriptor(instrumentationMethod), + instrumentationMethod.className(), + instrumentationMethod.methodName(), + Type.getMethodDescriptor( + Type.VOID_TYPE, + instrumentationMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new) + ), true ); } } protected void pushEntitlementChecker(MethodVisitor mv) { - mv.visitMethodInsn( - INVOKESTATIC, - "org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle", - "instance", - "()Lorg/elasticsearch/entitlement/bridge/EntitlementChecker;", - false - ); + mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", "()" + checkerClassDescriptor, false); } public record ClassFileInfo(String fileName, byte[] bytecodes) {} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java new file mode 100644 index 0000000000000..c0ff5d59d3c72 --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.test.ESTestCase; +import org.objectweb.asm.Type; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +@ESTestCase.WithoutSecurityManager +public class InstrumentationServiceImplTests extends ESTestCase { + + final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); + + static class TestTargetClass {} + + interface TestChecker { + void check$org_example_TestTargetClass$staticMethod(Class clazz, int arg0, String arg1, Object arg2); + + void check$$instanceMethodNoArgs(Class clazz, TestTargetClass that); + + void check$$instanceMethodWithArgs(Class clazz, TestTargetClass that, int x, int y); + } + + interface TestCheckerOverloads { + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, int y); + + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, String y); + } + + public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); + + assertThat(methodsMap, aMapWithSize(3)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodNoArgs", + List.of() + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodNoArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodWithArgs", + List.of("I", "I") + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodWithArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;", + "I", + "I" + ) + ) + ) + ) + ); + } + + 
public void testInstrumentationTargetLookupWithOverloads() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName()); + + assertThat(methodsMap, aMapWithSize(2)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "java/lang/String"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "I"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "I") + ) + ) + ) + ); + } + + public void testParseCheckerMethodSignatureStaticMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureStaticMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of("I", "java/lang/String")))); + } + + public void testParseCheckerMethodSignatureStaticMethodInnerClass() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$InnerClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass$InnerClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureIncorrectName() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$staticMethod", new Type[] { Type.getType(Class.class) }) + ); + + assertThat(exception.getMessage(), containsString("has incorrect name format")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$ClassName$staticMethod", new Type[] {}) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentType() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$ClassName$staticMethod", + new Type[] { Type.getType(String.class) } + ) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { 
Type.getType(Class.class), Type.getType(TestTargetClass.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of() + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(String.class) }) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(Class.class) }) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes2() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType("I") } + ) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index 9a57e199d4907..e3f5539999be5 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -11,7 +11,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -22,11 +24,12 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; -import java.util.stream.Collectors; +import java.util.Map; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; 
import static org.objectweb.asm.Opcodes.INVOKESTATIC; /** @@ -53,7 +56,12 @@ public void initialize() { * Contains all the virtual methods from {@link ClassToInstrument}, * allowing this test to call them on the dynamically loaded instrumented class. */ - public interface Testable {} + public interface Testable { + // This method is here to demonstrate Instrumenter does not get confused by overloads + void someMethod(int arg); + + void someMethod(int arg, String anotherArg); + } /** * This is a placeholder for real class library methods. @@ -71,10 +79,26 @@ public static void systemExit(int status) { public static void anotherSystemExit(int status) { assertEquals(123, status); } + + public void someMethod(int arg) {} + + public void someMethod(int arg, String anotherArg) {} + + public static void someStaticMethod(int arg) {} + + public static void someStaticMethod(int arg, String anotherArg) {} } static final class TestException extends RuntimeException {} + public interface MockEntitlementChecker extends EntitlementChecker { + void checkSomeStaticMethod(Class clazz, int arg); + + void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); + + void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + } + /** * We're not testing the permission checking logic here; * only that the instrumented methods are calling the correct check methods with the correct arguments. @@ -82,7 +106,7 @@ static final class TestException extends RuntimeException {} * just to demonstrate that the injected bytecodes succeed in calling these methods. * It also asserts that the arguments are correct. */ - public static class TestEntitlementChecker implements EntitlementChecker { + public static class TestEntitlementChecker implements MockEntitlementChecker { /** * This allows us to test that the instrumentation is correct in both cases: * if the check throws, and if it doesn't. 
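A note on the convention these tests pin down: a checker method name such as check$org_example_TestClass$staticMethod encodes the target class (underscores standing in for package separators) plus the target method name, while the empty class segment in check$$instanceMethod marks an instance method whose target class is taken from the checker's second parameter instead. The following sketch illustrates the parsing rule only; CheckerNameSketch, TargetRef and parseTarget are hypothetical names, not the real InstrumentationServiceImpl API.

class CheckerNameSketch {
    record TargetRef(String internalClassName, String methodName, boolean isInstanceMethod) {}

    static TargetRef parseTarget(String checkerMethodName) {
        final String prefix = "check$";
        int lastDollar = checkerMethodName.lastIndexOf('$');
        if (checkerMethodName.startsWith(prefix) == false || lastDollar < prefix.length()) {
            throw new IllegalArgumentException("checker method [" + checkerMethodName + "] has incorrect name format");
        }
        String methodName = checkerMethodName.substring(lastDollar + 1);
        // The segment between "check$" and the last '$' names the target class: underscores become
        // package separators while inner-class '$' separators survive, so
        // check$org_example_TestClass$InnerClass$staticMethod maps to org/example/TestClass$InnerClass.
        // An empty segment marks an instance method.
        String classSegment = checkerMethodName.substring(prefix.length(), lastDollar);
        return new TargetRef(classSegment.replace('_', '/'), methodName, classSegment.isEmpty());
    }

    public static void main(String[] args) {
        // TargetRef[internalClassName=org/example/TestClass, methodName=staticMethod, isInstanceMethod=false]
        System.out.println(parseTarget("check$org_example_TestClass$staticMethod"));
        // TargetRef[internalClassName=, methodName=instanceMethod, isInstanceMethod=true]
        System.out.println(parseTarget("check$$instanceMethod"));
    }
}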
@@ -90,9 +114,12 @@ public static class TestEntitlementChecker implements EntitlementChecker { volatile boolean isActive; int checkSystemExitCallCount = 0; + int checkSomeStaticMethodIntCallCount = 0; + int checkSomeStaticMethodIntStringCallCount = 0; + int checkSomeInstanceMethodCallCount = 0; @Override - public void checkSystemExit(Class callerClass, int status) { + public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; assertSame(InstrumenterTests.class, callerClass); assertEquals(123, status); @@ -104,11 +131,48 @@ private void throwIfActive() { throw new TestException(); } } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg) { + checkSomeStaticMethodIntCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { + checkSomeStaticMethodIntStringCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + assertEquals("abc", anotherArg); + throwIfActive(); + } + + @Override + public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { + checkSomeInstanceMethodCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertThat( + that.getClass().getName(), + startsWith("org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$ClassToInstrument") + ); + assertEquals(123, arg); + assertEquals("def", anotherArg); + throwIfActive(); + } } public void testClassIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -117,7 +181,7 @@ public void testClassIsInstrumented() throws Exception { } Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW", + classToInstrument.getName() + "_NEW", newBytecode ); @@ -134,7 +198,14 @@ public void testClassIsInstrumented() throws Exception { public void testClassIsNotInstrumentedTwice() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -146,7 +217,7 @@ public void testClassIsNotInstrumentedTwice() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + 
"_NEW_NEW", instrumentedTwiceBytecode ); @@ -159,7 +230,16 @@ public void testClassIsNotInstrumentedTwice() throws Exception { public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit", "anotherSystemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod, + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("anotherSystemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -171,7 +251,7 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + "_NEW_NEW", instrumentedTwiceBytecode ); @@ -185,22 +265,78 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); } - /** This test doesn't replace ClassToInstrument in-place but instead loads a separate - * class ClassToInstrument_NEW that contains the instrumentation. Because of this, - * we need to configure the Transformer to use a MethodKey and instrumentationMethod - * with slightly different signatures (using the common interface Testable) which - * is not what would happen when it's run by the agent. - */ - private InstrumenterImpl createInstrumenter(Class classToInstrument, String... 
methodNames) throws NoSuchMethodException { - Method v1 = EntitlementChecker.class.getMethod("checkSystemExit", Class.class, int.class); - var methods = Arrays.stream(methodNames).map(name -> { - try { - return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); + public void testInstrumenterWorksWithOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); + + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount, is(1)); + } + + public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + Testable testTargetClass = (Testable) (newClass.getConstructor().newInstance()); + + // This overload is not instrumented, so it will not throw + testTargetClass.someMethod(123); + assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); + + assertThat(getTestEntitlementChecker().checkSomeInstanceMethodCallCount, is(1)); + } + + /** This test doesn't replace classToInstrument in-place but instead loads a separate + * class with the same class name plus a "_NEW" suffix (classToInstrument.class.getName() + "_NEW") + * that contains the instrumentation. 
Because of this, we need to configure the Transformer to use a + * MethodKey and instrumentationMethod with slightly different signatures (using the common interface + * Testable) which is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Map methods) throws NoSuchMethodException { Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); return new InstrumenterImpl("_NEW", methods) { /** @@ -220,13 +356,38 @@ protected void pushEntitlementChecker(MethodVisitor mv) { }; } + private static CheckerMethod getCheckerMethod(Class clazz, String methodName, Class... parameterTypes) + throws NoSuchMethodException { + var method = clazz.getMethod(methodName, parameterTypes); + return new CheckerMethod( + Type.getInternalName(clazz), + method.getName(), + Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() + ); + } + /** * Calling a static method of a dynamically loaded class is significantly more cumbersome * than calling a virtual method. */ - private static void callStaticMethod(Class c, String methodName, int status) throws NoSuchMethodException, IllegalAccessException { + private static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod(methodName, int.class).invoke(null, arg); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! + throw n; + } else { + throw new AssertionError(cause); + } + } + } + + private static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, + IllegalAccessException { try { - c.getMethod(methodName, int.class).invoke(null, status); + c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause instanceof TestException n) { diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index 3d59dd3eaf33e..a9f8f6e3a3b0a 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -7,19 +7,18 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.mrjar' -configurations { - bridgeJar { - canBeConsumed = true - canBeResolved = false +tasks.named('jar').configure { + // guarding for intellij + if (sourceSets.findByName("main23")) { + from sourceSets.main23.output } } -artifacts { - bridgeJar(jar) -} - -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 5ebb7d00e26f5..167c93c90df5c 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,5 +10,5 @@ package org.elasticsearch.entitlement.bridge; public interface EntitlementChecker { - void checkSystemExit(Class callerClass, int status); + void check$java_lang_System$exit(Class callerClass, int status); } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java index 2fe4a163a4136..26c9c83b8eb51 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java @@ -9,9 +9,6 @@ package org.elasticsearch.entitlement.bridge; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; - /** * Makes the {@link EntitlementChecker} available to injected bytecode. */ @@ -35,27 +32,7 @@ private static class Holder { * The {@code EntitlementInitialization} class is what actually instantiates it and makes it available; * here, we copy it into a static final variable for maximum performance. */ - private static final EntitlementChecker instance; - static { - String initClazz = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; - final Class clazz; - try { - clazz = ClassLoader.getSystemClassLoader().loadClass(initClazz); - } catch (ClassNotFoundException e) { - throw new AssertionError("java.base cannot find entitlement initialziation", e); - } - final Method checkerMethod; - try { - checkerMethod = clazz.getMethod("checker"); - } catch (NoSuchMethodException e) { - throw new AssertionError("EntitlementInitialization is missing checker() method", e); - } - try { - instance = (EntitlementChecker) checkerMethod.invoke(null); - } catch (IllegalAccessException | InvocationTargetException e) { - throw new AssertionError(e); - } - } + private static final EntitlementChecker instance = HandleLoader.load(EntitlementChecker.class); } // no construction diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java new file mode 100644 index 0000000000000..bbfec47884f79 --- /dev/null +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +class HandleLoader { + + static T load(Class checkerClass) { + String initClassName = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; + final Class initClazz; + try { + initClazz = ClassLoader.getSystemClassLoader().loadClass(initClassName); + } catch (ClassNotFoundException e) { + throw new AssertionError("java.base cannot find entitlement initialization", e); + } + final Method checkerMethod; + try { + checkerMethod = initClazz.getMethod("checker"); + } catch (NoSuchMethodException e) { + throw new AssertionError("EntitlementInitialization is missing checker() method", e); + } + try { + return checkerClass.cast(checkerMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + + // no instance + private HandleLoader() {} +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java similarity index 52% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java rename to libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java index aa19bf6d60c53..244632e80ffa0 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java @@ -7,21 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.gradle.internal.distribution; +package org.elasticsearch.entitlement.bridge; -import org.elasticsearch.gradle.ElasticsearchDistributionType; - -public class DockerUbiElasticsearchDistributionType implements ElasticsearchDistributionType { - - DockerUbiElasticsearchDistributionType() {} - - @Override - public String getName() { - return "dockerUbi"; - } - - @Override - public boolean isDocker() { - return true; - } -} +public interface Java23EntitlementChecker extends EntitlementChecker {} diff --git a/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java new file mode 100644 index 0000000000000..f41c5dcdf14fd --- /dev/null +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +/** + * Java23 variant of {@link EntitlementChecker} handle holder. + */ +public class Java23EntitlementCheckerHandle { + + public static Java23EntitlementChecker instance() { + return Holder.instance; + } + + private static class Holder { + private static final Java23EntitlementChecker instance = HandleLoader.load(Java23EntitlementChecker.class); + } + + // no construction + private Java23EntitlementCheckerHandle() {} +} diff --git a/libs/entitlement/build.gradle b/libs/entitlement/build.gradle index 12e0bb48a54b7..841591873153c 100644 --- a/libs/entitlement/build.gradle +++ b/libs/entitlement/build.gradle @@ -6,10 +6,13 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ + +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' - apply plugin: 'elasticsearch.embedded-providers' +apply plugin: 'elasticsearch.mrjar' embeddedProviders { impl 'entitlement', project(':libs:entitlement:asm-provider') @@ -23,8 +26,13 @@ dependencies { testImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'entitlement' } + + // guarding for intellij + if (sourceSets.findByName("main23")) { + main23CompileOnly project(path: ':libs:entitlement:bridge', configuration: 'java23') + } } -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 7f68457baea9e..01b8f4d574f90 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -15,6 +15,7 @@ import com.sun.tools.attach.VirtualMachine; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -22,15 +23,33 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collection; +import java.util.Objects; +import java.util.function.Function; public class EntitlementBootstrap { + public record BootstrapArgs(Collection> pluginData, Function, String> pluginResolver) {} + + private static BootstrapArgs bootstrapArgs; + + public static BootstrapArgs bootstrapArgs() { + return bootstrapArgs; + } + /** - * Activates entitlement checking. Once this method returns, calls to forbidden methods - * will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * Activates entitlement checking. 
Once this method returns, calls to methods protected by Entitlements from classes without a valid + * policy will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * @param pluginData a collection of (plugin path, boolean) pairs holding the paths of all the installed Elasticsearch modules and + * plugins, and whether they are Java modular or not. + * @param pluginResolver a function mapping a Java Class to the name of the plugin it belongs to. */ - public static void bootstrap() { + public static void bootstrap(Collection<Tuple<Path, Boolean>> pluginData, Function<Class<?>, String> pluginResolver) { logger.debug("Loading entitlement agent"); + if (EntitlementBootstrap.bootstrapArgs != null) { + throw new IllegalStateException("plugin data is already set"); + } + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(Objects.requireNonNull(pluginData), Objects.requireNonNull(pluginResolver)); exportInitializationToAgent(); loadAgent(findAgentJar()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 155d5a27c606b..ca57e7b255bca 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,17 +9,37 @@ package org.elasticsearch.entitlement.initialization; +import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.provider.ProviderLocator; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.Policy; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; +import org.elasticsearch.entitlement.runtime.policy.PolicyParser; +import org.elasticsearch.entitlement.runtime.policy.Scope; +import java.io.IOException; import java.lang.instrument.Instrumentation; -import java.lang.reflect.Method; +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; /** * Called by the agent during {@code agentmain} to configure the entitlement system, * to begin injecting our instrumentation.
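 * The initialize method below wires this up: initChecker() reflectively instantiates ElasticsearchEntitlementChecker (or its Java23 variant on JDK 23 and later) with a PolicyManager, lookupMethodsToInstrument derives the method map from the EntitlementChecker interface, and the instrumented classes are then retransformed.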
*/ public class EntitlementInitialization { + + private static final String POLICY_FILE_NAME = "entitlement-policy.yaml"; + private static ElasticsearchEntitlementChecker manager; // Note: referenced by bridge reflectively @@ -38,16 +61,112 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(); + manager = initChecker(); + + Map<MethodKey, CheckerMethod> methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( + "org.elasticsearch.entitlement.bridge.EntitlementChecker" + ); + + var classesToTransform = methodMap.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); + + inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), classesToTransform), true); + // TODO: should we limit this array somehow? + var classesToRetransform = classesToTransform.stream().map(EntitlementInitialization::internalNameToClass).toArray(Class<?>[]::new); + inst.retransformClasses(classesToRetransform); + } + + private static Class<?> internalNameToClass(String internalName) { + try { + return Class.forName(internalName.replace('/', '.'), false, ClassLoader.getPlatformClassLoader()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + private static PolicyManager createPolicyManager() throws IOException { + Map<String, Policy> pluginPolicies = createPluginPolicies(EntitlementBootstrap.bootstrapArgs().pluginData()); + + // TODO: What should the name be? + // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it + var serverPolicy = new Policy("server", List.of()); + return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); + } + + private static Map<String, Policy> createPluginPolicies(Collection<Tuple<Path, Boolean>> pluginData) throws IOException { + Map<String, Policy> pluginPolicies = new HashMap<>(pluginData.size()); + for (Tuple<Path, Boolean> entry : pluginData) { + Path pluginRoot = entry.v1(); + boolean isModular = entry.v2(); + + String pluginName = pluginRoot.getFileName().toString(); + final Policy policy = loadPluginPolicy(pluginRoot, isModular, pluginName); + + pluginPolicies.put(pluginName, policy); + } + return pluginPolicies; + } + + private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName) throws IOException { + Path policyFile = pluginRoot.resolve(POLICY_FILE_NAME); + + final Set<String> moduleNames = getModuleNames(pluginRoot, isModular); + final Policy policy = parsePolicyIfExists(pluginName, policyFile); + + // TODO: should this check actually be part of the parser?
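+ // A scope may only name one of the plugin's own Java modules, or ALL-UNNAMED for a non-modular plugin; any other name in the policy file is rejected by the loop below.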
+ for (Scope scope : policy.scopes) { + if (moduleNames.contains(scope.name) == false) { + throw new IllegalStateException("policy [" + policyFile + "] contains invalid module [" + scope.name + "]"); + } + } + return policy; + } + + private static Policy parsePolicyIfExists(String pluginName, Path policyFile) throws IOException { + if (Files.exists(policyFile)) { + return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName).parsePolicy(); + } + return new Policy(pluginName, List.of()); + } + + private static Set getModuleNames(Path pluginRoot, boolean isModular) { + if (isModular) { + ModuleFinder moduleFinder = ModuleFinder.of(pluginRoot); + Set moduleReferences = moduleFinder.findAll(); + + return moduleReferences.stream().map(mr -> mr.descriptor().name()).collect(Collectors.toUnmodifiableSet()); + } + // When isModular == false we use the same "ALL-UNNAMED" constant as the JDK to indicate (any) unnamed module for this plugin + return Set.of(ALL_UNNAMED); + } - // TODO: Configure actual entitlement grants instead of this hardcoded one - Method targetMethod = System.class.getMethod("exit", int.class); - Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.bridge.EntitlementChecker") - .getMethod("checkSystemExit", Class.class, int.class); - Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod); + private static ElasticsearchEntitlementChecker initChecker() throws IOException { + final PolicyManager policyManager = createPolicyManager(); - inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true); - inst.retransformClasses(System.class); + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + final String className = "org.elasticsearch.entitlement.runtime.api." + classNamePrefix + "ElasticsearchEntitlementChecker"; + Class clazz; + try { + clazz = Class.forName(className); + } catch (ClassNotFoundException e) { + throw new AssertionError("entitlement lib cannot find entitlement impl", e); + } + Constructor constructor; + try { + constructor = clazz.getConstructor(PolicyManager.class); + } catch (NoSuchMethodException e) { + throw new AssertionError("entitlement impl is missing no arg constructor", e); + } + try { + return (ElasticsearchEntitlementChecker) constructor.newInstance(policyManager); + } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) { + throw new AssertionError(e); + } } private static String internalName(Class c) { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java new file mode 100644 index 0000000000000..c20a75a61a608 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation; + +import java.util.List; + +/** + * A structure to use as a representation of the checker method the instrumentation will inject. + * + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the checker method name + * @param parameterDescriptors a list of + * type descriptors) + * for methodName parameters. + */ +public record CheckerMethod(String className, String methodName, List parameterDescriptors) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 25fa84ec7c4ba..12316bfb043c5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation; +import java.io.IOException; import java.lang.reflect.Method; import java.util.Map; @@ -16,10 +17,12 @@ * The SPI service entry point for instrumentation. */ public interface InstrumentationService { - Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); + Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); /** * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline */ MethodKey methodKeyForTarget(Method targetMethod); + + Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java index 54e09c10bcc57..256a4d709d9dc 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java @@ -12,7 +12,10 @@ import java.util.List; /** + * A structure to use as a key/lookup for a method target of instrumentation * - * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the method name + * @param parameterTypes a list of "internal names" for the parameter types */ -public record MethodKey(String className, String methodName, List parameterTypes, boolean isStatic) {} +public record MethodKey(String className, String methodName, List parameterTypes) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 6d5dbd4098aa9..790416ca5659a 100644 --- 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,10 +10,8 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; - -import java.util.Optional; +import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; /** * Implementation of the {@link EntitlementChecker} interface, providing additional @@ -21,51 +19,14 @@ * The trampoline module loads this object via SPI. */ public class ElasticsearchEntitlementChecker implements EntitlementChecker { - private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + private final PolicyManager policyManager; - @Override - public void checkSystemExit(Class callerClass, int status) { - var requestingModule = requestingModule(callerClass); - if (isTriviallyAllowed(requestingModule)) { - return; - } - // Hard-forbidden until we develop the permission granting scheme - throw new NotEntitledException("Missing entitlement for " + requestingModule); + public ElasticsearchEntitlementChecker(PolicyManager policyManager) { + this.policyManager = policyManager; } - private static Module requestingModule(Class callerClass) { - if (callerClass != null) { - Module callerModule = callerClass.getModule(); - if (callerModule.getLayer() != ModuleLayer.boot()) { - // fast path - return callerModule; - } - } - int framesToSkip = 1 // getCallingClass (this method) - + 1 // the checkXxx method - + 1 // the runtime config method - + 1 // the instrumented method - ; - Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) - .walk( - s -> s.skip(framesToSkip) - .map(f -> f.getDeclaringClass().getModule()) - .filter(m -> m.getLayer() != ModuleLayer.boot()) - .findFirst() - ); - return module.orElse(null); - } - - private static boolean isTriviallyAllowed(Module requestingModule) { - if (requestingModule == null) { - logger.debug("Trivially allowed: Entire call stack is in the boot module layer"); - return true; - } - if (requestingModule == System.class.getModule()) { - logger.debug("Trivially allowed: Caller is in {}", System.class.getModule().getName()); - return true; - } - logger.trace("Not trivially allowed"); - return false; + @Override + public void check$java_lang_System$exit(Class callerClass, int status) { + policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT); } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java similarity index 53% rename from server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java index bd39d125969ce..60490baf41a10 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java @@ -7,17 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.indices; +package org.elasticsearch.entitlement.runtime.policy; -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class IndicesFeatures implements FeatureSpecification { - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(IndicesService.SUPPORTS_AUTO_PUT, Version.V_8_8_0); - } +public enum FlagEntitlementType { + SYSTEM_EXIT; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java new file mode 100644 index 0000000000000..c06dc09758de5 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.Strings; +import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class PolicyManager { + private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + + protected final Policy serverPolicy; + protected final Map<String, Policy> pluginPolicies; + private final Function<Class<?>, String> pluginResolver; + + public static final String ALL_UNNAMED = "ALL-UNNAMED"; + + private static final Set<Module> systemModules = findSystemModules(); + + private static Set<Module> findSystemModules() { + var systemModulesDescriptors = ModuleFinder.ofSystem() + .findAll() + .stream() + .map(ModuleReference::descriptor) + .collect(Collectors.toUnmodifiableSet()); + + return ModuleLayer.boot() + .modules() + .stream() + .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) + .collect(Collectors.toUnmodifiableSet()); + } + + public PolicyManager(Policy defaultPolicy, Map<String, Policy> pluginPolicies, Function<Class<?>, String> pluginResolver) { + this.serverPolicy = Objects.requireNonNull(defaultPolicy); + this.pluginPolicies = Collections.unmodifiableMap(Objects.requireNonNull(pluginPolicies)); + this.pluginResolver = pluginResolver; + } + + public void checkFlagEntitlement(Class<?> callerClass, FlagEntitlementType type) { + var requestingModule = requestingModule(callerClass); + if (isTriviallyAllowed(requestingModule)) { + return; + } + + // TODO: real policy check. For now, we only allow our hardcoded System.exit policy for server.
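+ // The only grant that exists at this stage is SYSTEM_EXIT for the named server module; any other caller that gets this far falls through to the NotEntitledException below.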
+ // TODO: this will be checked using policies + if (requestingModule.isNamed() + && requestingModule.getName().equals("org.elasticsearch.server") + && type == FlagEntitlementType.SYSTEM_EXIT) { + logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type); + return; + } + + // TODO: plugins policy check using pluginResolver and pluginPolicies + throw new NotEntitledException( + Strings.format("Missing entitlement [%s] for caller [%s] in module [%s]", type, callerClass, requestingModule.getName()) + ); + } + + private static Module requestingModule(Class<?> callerClass) { + if (callerClass != null) { + Module callerModule = callerClass.getModule(); + if (systemModules.contains(callerModule) == false) { + // fast path + return callerModule; + } + } + int framesToSkip = 1 // getCallingClass (this method) + + 1 // the checkXxx method + + 1 // the runtime config method + + 1 // the instrumented method + ; + Optional<Module> module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) + .walk( + s -> s.skip(framesToSkip) + .map(f -> f.getDeclaringClass().getModule()) + .filter(m -> systemModules.contains(m) == false) + .findFirst() + ); + return module.orElse(null); + } + + private static boolean isTriviallyAllowed(Module requestingModule) { + if (requestingModule == null) { + logger.debug("Trivially allowed: entire call stack is composed of classes in system modules"); + return true; + } + logger.trace("Not trivially allowed"); + return false; + } + + @Override + public String toString() { + return "PolicyManager{" + "serverPolicy=" + serverPolicy + ", pluginPolicies=" + pluginPolicies + '}'; + } +} diff --git a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java new file mode 100644 index 0000000000000..d0f9f4f48609c --- /dev/null +++ b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.entitlement.runtime.api; + +import org.elasticsearch.entitlement.bridge.Java23EntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; + +public class Java23ElasticsearchEntitlementChecker extends ElasticsearchEntitlementChecker implements Java23EntitlementChecker { + + public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) { + super(policyManager); + } + + @Override + public void check$java_lang_System$exit(Class callerClass, int status) { + // TODO: this is just an example, we shouldn't really override a method implemented in the superclass + super.check$java_lang_System$exit(callerClass, status); + } +} diff --git a/libs/entitlement/tools/common/build.gradle b/libs/entitlement/tools/common/build.gradle index 3373a8f747430..89772b4132c5f 100644 --- a/libs/entitlement/tools/common/build.gradle +++ b/libs/entitlement/tools/common/build.gradle @@ -7,9 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -plugins { - id 'java' -} - -group = 'org.elasticsearch.entitlement.tools' +apply plugin: 'elasticsearch.build' +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} diff --git a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java index e146b06fe3f53..cb90d10665659 100644 --- a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java +++ b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java @@ -28,8 +28,12 @@ public String getId() { return delegate.getId(); } - public Map getConfigAsMap() { - return delegate.getConfigAsMap(); + public Map getConfig() { + return delegate.getConfig(); + } + + public Map getConfig(final boolean unmodifiable) { + return delegate.getConfig(unmodifiable); } @Override diff --git a/libs/plugin-analysis-api/build.gradle b/libs/plugin-analysis-api/build.gradle index 3f1670d76a0c1..41fbbdbafe998 100644 --- a/libs/plugin-analysis-api/build.gradle +++ b/libs/plugin-analysis-api/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java index 4fd471c529e75..02d0491118dc7 100644 --- a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java @@ -157,7 +157,9 @@ private static void debugThreadGroups(final ThreadGroup caller, final ThreadGrou // Returns true if the given thread is an instance of the JDK's InnocuousThread. 
private static boolean isInnocuousThread(Thread t) { final Class<?> c = t.getClass(); - return c.getModule() == Object.class.getModule() && c.getName().equals("jdk.internal.misc.InnocuousThread"); + return c.getModule() == Object.class.getModule() + && (c.getName().equals("jdk.internal.misc.InnocuousThread") + || c.getName().equals("java.util.concurrent.ForkJoinWorkerThread$InnocuousForkJoinWorkerThread")); } protected void checkThreadAccess(Thread t) { @@ -184,11 +186,21 @@ protected void checkThreadAccess(Thread t) { private static final Permission MODIFY_THREADGROUP_PERMISSION = new RuntimePermission("modifyThreadGroup"); private static final Permission MODIFY_ARBITRARY_THREADGROUP_PERMISSION = new ThreadPermission("modifyArbitraryThreadGroup"); + // Returns true if the given thread group is the JDK's InnocuousForkJoinWorkerThreadGroup. + private static boolean isInnocuousThreadGroup(ThreadGroup t) { + final Class<?> c = t.getClass(); + return c.getModule() == Object.class.getModule() && t.getName().equals("InnocuousForkJoinWorkerThreadGroup"); + } + protected void checkThreadGroupAccess(ThreadGroup g) { Objects.requireNonNull(g); + boolean targetThreadGroupIsInnocuous = isInnocuousThreadGroup(g); + // first, check if we can modify thread groups at all. - checkPermission(MODIFY_THREADGROUP_PERMISSION); + if (targetThreadGroupIsInnocuous == false) { + checkPermission(MODIFY_THREADGROUP_PERMISSION); + } // check the threadgroup, if its our thread group or an ancestor, its fine. final ThreadGroup source = Thread.currentThread().getThreadGroup(); @@ -196,7 +208,7 @@ protected void checkThreadGroupAccess(ThreadGroup g) { if (source == null) { return; // we are a dead thread, do nothing - } else if (source.parentOf(target) == false) { + } else if (source.parentOf(target) == false && targetThreadGroupIsInnocuous == false) { checkPermission(MODIFY_ARBITRARY_THREADGROUP_PERMISSION); } } diff --git a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java index b94639414ffe5..69c6973f57cdf 100644 --- a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java @@ -14,7 +14,10 @@ import java.security.Permission; import java.security.Policy; import java.security.ProtectionDomain; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; /** Simple tests for SecureSM */ public class SecureSMTests extends TestCase { @@ -128,4 +131,12 @@ public void run() { t1.join(); assertTrue(interrupted1.get()); } + + public void testParallelStreamThreadGroup() throws Exception { + List<Integer> list = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + list.add(i); + } + list.parallelStream().collect(Collectors.toSet()); + } } diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index ffc50ecb1f6ff..95b8ddf28cf2f 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1".
*/ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.publish' diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 5df0a890af753..94fdddf6d711a 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -20,7 +18,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script' + include 'capabilities', '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script' } restTests { // Pulls in all aggregation tests from core AND the forwards v7's core for forwards compatibility diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 2b4fea0327e86..203105edc5a24 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -177,65 +178,66 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { // Buckets are ordered into groups - [keyed filters] [key1&key2 intersects] - int maxOrd = owningBucketOrds.length * totalNumKeys; - int totalBucketsToBuild = 0; - for (int ord = 0; ord < maxOrd; ord++) { + long maxOrd = owningBucketOrds.size() * totalNumKeys; + long totalBucketsToBuild = 0; + for (long ord = 0; ord < maxOrd; ord++) { if (bucketDocCount(ord) > 0) { totalBucketsToBuild++; } } - long[] bucketOrdsToBuild = new long[totalBucketsToBuild]; - int builtBucketIndex = 0; - for (int ord = 0; ord < maxOrd; ord++) { - if (bucketDocCount(ord) > 0) { - bucketOrdsToBuild[builtBucketIndex++] = ord; - } - } - assert builtBucketIndex == totalBucketsToBuild; - builtBucketIndex = 0; - var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < owningBucketOrds.length; owningBucketOrdIdx++) { - List buckets = new ArrayList<>(filters.length); - for (int i = 0; i < keys.length; i++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], i); - long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned because this aggregation will commonly be used under a - // a date-histogram where we will look for 
transactions over time and can expect many - // empty buckets. - if (docCount > 0) { - InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - keys[i], - docCount, - bucketSubAggs.apply(builtBucketIndex++) - ); - buckets.add(bucket); + try (LongArray bucketOrdsToBuild = bigArrays().newLongArray(totalBucketsToBuild)) { + int[] builtBucketIndex = new int[] { 0 }; + for (int ord = 0; ord < maxOrd; ord++) { + if (bucketDocCount(ord) > 0) { + bucketOrdsToBuild.set(builtBucketIndex[0]++, ord); } } - int pos = keys.length; - for (int i = 0; i < keys.length; i++) { - for (int j = i + 1; j < keys.length; j++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], pos); + assert builtBucketIndex[0] == totalBucketsToBuild; + builtBucketIndex[0] = 0; + var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); + InternalAggregation[] aggregations = buildAggregations(Math.toIntExact(owningBucketOrds.size()), owningBucketOrdIdx -> { + List buckets = new ArrayList<>(filters.length); + for (int i = 0; i < keys.length; i++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), i); long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned due to potential for very sparse matrices + // Empty buckets are not returned because this aggregation will commonly be used under a + // a date-histogram where we will look for transactions over time and can expect many + // empty buckets. if (docCount > 0) { - String intersectKey = keys[i] + separator + keys[j]; + checkRealMemoryCBForInternalBucket(); InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - intersectKey, + keys[i], docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } - pos++; } - } - results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); + int pos = keys.length; + for (int i = 0; i < keys.length; i++) { + for (int j = i + 1; j < keys.length; j++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), pos); + long docCount = bucketDocCount(bucketOrd); + // Empty buckets are not returned due to potential for very sparse matrices + if (docCount > 0) { + String intersectKey = keys[i] + separator + keys[j]; + InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( + intersectKey, + docCount, + bucketSubAggs.apply(builtBucketIndex[0]++) + ); + buckets.add(bucket); + } + pos++; + } + } + return new InternalAdjacencyMatrix(name, buckets, metadata()); + }); + assert builtBucketIndex[0] == totalBucketsToBuild; + return aggregations; } - assert builtBucketIndex == totalBucketsToBuild; - return results; } @Override diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 6f36f1f17bf8b..824f009bc7d8e 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params 
params) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), key); builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -237,7 +235,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index d4e1c2928c441..6add1b0ac4a13 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -141,7 +141,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag protected final InternalAggregation[] buildAggregations( LongKeyedBucketOrds bucketOrds, LongToIntFunction roundingIndexFor, - long[] owningBucketOrds + LongArray owningBucketOrds ) throws IOException { return buildAggregationsForVariableBuckets( owningBucketOrds, @@ -324,7 +324,7 @@ private void increaseRoundingIfNeeded(long rounded) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregations(bucketOrds, l -> roundingIdx, owningBucketOrds); } @@ -594,7 +594,7 @@ private void rebucket() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Rebucket before building the aggregation to build as small as result * as possible. 
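Both aggregators above switch buildAggregations from a heap-allocated long[] to org.elasticsearch.common.util.LongArray, a BigArrays-backed array that is indexed with long and accounted against the circuit breaker. A minimal sketch of the before/after access pattern, assuming only the LongArray/BigArrays APIs visible in the hunks themselves; the class and method names here are invented for illustration and are not part of this change:

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;

class OwningOrdsSketch {
    // Before: int-indexed heap array, invisible to the request circuit breaker.
    static long sum(long[] ords) {
        long total = 0;
        for (int i = 0; i < ords.length; i++) {
            total += ords[i];
        }
        return total;
    }

    // After: long-indexed big array; it is Releasable, which is why the
    // hunks above wrap allocations in try-with-resources.
    static long sum(BigArrays bigArrays, long count) {
        try (LongArray ords = bigArrays.newLongArray(count)) { // zero-filled on allocation
            long total = 0;
            for (long i = 0; i < ords.size(); i++) {
                total += ords.get(i);
            }
            return total;
        }
    }
}

One consequence visible in the adjacency-matrix hunk: int[] builtBucketIndex = new int[] { 0 } is the usual effectively-final workaround, letting the counter be mutated inside the owningBucketOrdIdx -> { ... } lambda passed to buildAggregations.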
diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 42aa79f990fc6..edb7ec4cffce7 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -99,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, DocValueFormat format) throws IOException { String keyAsString = format.format(key).toString(); builder.startObject(); if (format != DocValueFormat.RAW) { @@ -110,7 +109,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -597,7 +595,7 @@ private BucketReduceResult mergeConsecutiveBuckets( public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, format); } builder.endArray(); builder.field("interval", getInterval().toString()); diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index c4cdacd135cb4..d7590f2126325 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -36,24 +36,21 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalTimeSeries.InternalBucket(in, keyed)); + buckets.add(new InternalTimeSeries.InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -162,7 +156,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -252,14 +246,14 @@ public InternalTimeSeries create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { InternalTimeSeries.InternalBucket reduced = null; for (InternalTimeSeries.InternalBucket bucket : buckets) { if (reduced == null) { - reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); + reduced = new 
InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations); } else { reduced.docCount += bucket.docCount; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index c74637330dd7a..63472bca1d9ac 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -11,6 +11,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.RoutingPathFields; @@ -30,6 +32,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -67,42 +70,39 @@ public TimeSeriesAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { BytesRef spare = new BytesRef(); - InternalTimeSeries.InternalBucket[][] allBucketsPerOrd = new InternalTimeSeries.InternalBucket[owningBucketOrds.length][]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - List<InternalTimeSeries.InternalBucket> buckets = new ArrayList<>(); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - ordsEnum.readValue(spare); - InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( - BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. - docCount, - null, - keyed - ); - bucket.bucketOrd = ordsEnum.ord(); - buckets.add(bucket); - if (buckets.size() >= size) { - break + try (ObjectArray<InternalTimeSeries.InternalBucket[]> allBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < allBucketsPerOrd.size(); ordIdx++) { + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + List<InternalTimeSeries.InternalBucket> buckets = new ArrayList<>(); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + ordsEnum.readValue(spare); + checkRealMemoryCBForInternalBucket(); + InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( + BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. + docCount, + null + ); + bucket.bucketOrd = ordsEnum.ord(); + buckets.add(bucket); + if (buckets.size() >= size) { + break; + } } + // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). + // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value + // changes not matching _tsid hash changes. Changes in _tsid hash are handled by creating a new bucket as a result of making + // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is not true anymore
This is no true anymore + // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). + buckets.sort(Comparator.comparing(bucket -> bucket.key)); + allBucketsPerOrd.set(ordIdx, buckets.toArray(new InternalTimeSeries.InternalBucket[0])); } - // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). - // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value - // changes not matching _tsid hash changes. Changes in _tsid hash are handled creating a new bucket as a result of making - // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is no true anymore - // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). - buckets.sort(Comparator.comparing(bucket -> bucket.key)); - allBucketsPerOrd[ordIdx] = buckets.toArray(new InternalTimeSeries.InternalBucket[0]); - } - buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(allBucketsPerOrd[ordIdx]); + return buildAggregations(Math.toIntExact(allBucketsPerOrd.size()), ordIdx -> buildResult(allBucketsPerOrd.get(ordIdx))); } - return result; } @Override @@ -185,7 +185,7 @@ public void collect(int doc, long bucket) throws IOException { } InternalTimeSeries buildResult(InternalTimeSeries.InternalBucket[] topBuckets) { - return new InternalTimeSeries(name, List.of(topBuckets), keyed, metadata()); + return new InternalTimeSeries(name, Arrays.asList(topBuckets), keyed, metadata()); } @FunctionalInterface diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java index e61c02e0b9cd2..3b67d09c0d6a1 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java @@ -49,7 +49,7 @@ private List randomBuckets(boolean keyed, InternalAggregations a } try { var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef(); - bucketList.add(new InternalBucket(key, docCount, aggregations, keyed)); + bucketList.add(new InternalBucket(key, docCount, aggregations)); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -108,10 +108,10 @@ public void testReduceSimple() { InternalTimeSeries first = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY), + new 
InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY) ), false, Map.of() @@ -119,8 +119,8 @@ public void testReduceSimple() { InternalTimeSeries second = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY) ), false, Map.of() @@ -128,9 +128,9 @@ public void testReduceSimple() { InternalTimeSeries third = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY) ), false, Map.of() diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java index d9a4023457126..493b4bdc81860 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java @@ -176,19 +176,19 @@ public void testMultiBucketAggregationAsSubAggregation() throws IOException { InternalDateHistogram byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=xxx}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=aaa, dim2=yyy}").docCount, equalTo(2L)); byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=yyy}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=bbb, dim2=zzz}").docCount, equalTo(4L)); byTimeStampBucket = ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 4, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 4, null, InternalAggregations.EMPTY)) ); }; diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml index 5b2c2dc379cb9..4d8efe2a6f9d8 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml @@ -142,6 +142,66 @@ setup: } - match: { aggregations.sampled.mean.value: 1.0 } --- +"Test random_sampler aggregation with scored subagg": + - 
requires: + capabilities: + - method: POST + path: /_search + capabilities: [ random_sampler_with_scored_subaggs ] + test_runner_features: capabilities + reason: "Support for random sampler with scored subaggs capability required" + - do: + search: + index: data + size: 0 + body: > + { + "query": { + "function_score": { + "random_score": {} + } + }, + "aggs": { + "sampled": { + "random_sampler": { + "probability": 0.5 + }, + "aggs": { + "top": { + "top_hits": {} + } + } + } + } + } + - is_true: aggregations.sampled.top.hits + - do: + search: + index: data + size: 0 + body: > + { + "query": { + "function_score": { + "random_score": {} + } + }, + "aggs": { + "sampled": { + "random_sampler": { + "probability": 1.0 + }, + "aggs": { + "top": { + "top_hits": {} + } + } + } + } + } + - match: { aggregations.sampled.top.hits.total.value: 6 } + - is_true: aggregations.sampled.top.hits.hits.0._score +--- "Test random_sampler aggregation with poor settings": - requires: cluster_features: ["gte_v8.2.0"] diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index b2b7f86ce34e6..e091f0175009e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -28,6 +28,8 @@ */ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundWordTokenFilterFactory { + private final boolean noSubMatches; + private final boolean noOverlappingMatches; private final HyphenationTree hyphenationTree; HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -46,6 +48,9 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW } catch (Exception e) { throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.", e); } + + noSubMatches = settings.getAsBoolean("no_sub_matches", false); + noOverlappingMatches = settings.getAsBoolean("no_overlapping_matches", false); } @Override @@ -57,7 +62,9 @@ public TokenStream create(TokenStream tokenStream) { minWordSize, minSubwordSize, maxSubwordSize, - onlyLongestMatch + onlyLongestMatch, + noSubMatches, + noOverlappingMatches ); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java index ad98c2f8ffe1e..69dd8e91b52b2 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java @@ -31,6 +31,9 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -42,6 +45,7 @@ import static org.hamcrest.Matchers.instanceOf; public class CompoundAnalysisTests extends ESTestCase { + public void testDefaultsCompoundAnalysis() throws Exception { Settings settings = getJsonSettings(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); @@ -63,6 +67,44 @@ public 
void testDictionaryDecompounder() throws Exception { assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } + public void testHyphenationDecompoundingAnalyzerOnlyLongestMatch() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List<String> terms = analyze(settings, "hyphenationDecompoundingAnalyzerOnlyLongestMatch", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat( + terms, + hasItems("kaffeemaschine", "kaffee", "fee", "maschine", "fussballpumpe", "fussball", "ballpumpe", "pumpe") + ); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example, given a word list of ["kaffee", "fee", "maschine"], + * no_sub_matches should prevent "fee" from being emitted as a token of "kaffeemaschine". + */ + public void testHyphenationDecompoundingAnalyzerNoSubMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List<String> terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoSubMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "ballpumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example, given a word list of ["fuss", "fussball", "ballpumpe", "ball", "pumpe"], + * no_overlapping_matches should prevent "ballpumpe" from being emitted as a token of "fussballpumpe". + */ + public void testHyphenationDecompoundingAnalyzerNoOverlappingMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List<String> terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoOverlappingMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "pumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisModule analysisModule = createAnalysisModule(settings); @@ -92,20 +134,25 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { } private Settings getJsonSettings() throws IOException { - String json = "/org/elasticsearch/analysis/common/test1.json"; - return Settings.builder() - .loadFromStream(json, getClass().getResourceAsStream(json), false) - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + return getSettings("/org/elasticsearch/analysis/common/test1.json"); } private Settings getYamlSettings() throws IOException { - String yaml = "/org/elasticsearch/analysis/common/test1.yml"; + return getSettings("/org/elasticsearch/analysis/common/test1.yml"); + } + + private Settings getSettings(String filePath) throws IOException { + String hyphenationRulesFileName = "de_DR.xml"; + InputStream hyphenationRules = getClass().getResourceAsStream(hyphenationRulesFileName); + Path home = createTempDir(); + Path config = home.resolve("config"); + Files.createDirectory(config); + Files.copy(hyphenationRules, config.resolve(hyphenationRulesFileName)); + + return Settings.builder() - .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) + .loadFromStream(filePath, getClass().getResourceAsStream(filePath), false) .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } }
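These tests rely on analyzer definitions (hyphenationDecompoundingAnalyzerNoSubMatches and friends) that live in test1.json/test1.yml and are not shown in this diff. A minimal sketch of how the two new flags might be wired into a hyphenation_decompounder token filter through the Settings API; the filter name "decomp" and the word list are invented for illustration, while the setting keys (no_sub_matches, no_overlapping_matches, hyphenation_patterns_path) come from the factory hunk above:

import org.elasticsearch.common.settings.Settings;

Settings filterSettings = Settings.builder()
    .put("index.analysis.filter.decomp.type", "hyphenation_decompounder")
    .put("index.analysis.filter.decomp.hyphenation_patterns_path", "de_DR.xml") // resolved against the node config directory
    .putList("index.analysis.filter.decomp.word_list", "kaffee", "fee", "maschine")
    .put("index.analysis.filter.decomp.no_sub_matches", true)          // e.g. suppress "fee" inside "kaffeemaschine"
    .put("index.analysis.filter.decomp.no_overlapping_matches", false)
    .build();

Both flags default to false (settings.getAsBoolean(..., false) in the factory), so existing filter definitions keep their current behavior.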
diff --git a/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml new file mode 100644 index 0000000000000..37bcde1246a81 --- /dev/null +++ b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml @@ -0,0 +1,1130 @@
[The 1,130 added lines are a FOP-format German hyphenation grammar: a character-class table (aA bB cC ... zZ plus umlauts) followed by TeX-style hyphenation patterns such as ".aa6l .ab3a4s .ab3ei" and closing ck/ss substitution rules. The file is ISO-8859-1 encoded and its non-ASCII characters were mojibake-corrupted in this capture, so the pattern body is summarized here rather than reproduced in garbled form.]
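This resource feeds Lucene's HyphenationCompoundWordTokenFilter, to which the factory above appends the two new booleans (as its create() hunk shows). A sketch of the underlying Lucene call, assuming the Lucene 9.x constructor matches the factory's argument order; input and dictionary are placeholder parameters:

import java.io.IOException;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter;
import org.apache.lucene.analysis.compound.hyphenation.HyphenationTree;
import org.xml.sax.InputSource;

TokenStream decompound(TokenStream input, CharArraySet dictionary) throws IOException {
    // Parse the FOP-format pattern file (here: the de_DR.xml resource added above).
    HyphenationTree tree = HyphenationCompoundWordTokenFilter.getHyphenationTree(new InputSource("de_DR.xml"));
    return new HyphenationCompoundWordTokenFilter(
        input, tree, dictionary,
        5, 2, 15,  // min_word_size, min_subword_size, max_subword_size defaults
        false,     // only_longest_match
        true,      // no_sub_matches (new)
        false      // no_overlapping_matches (new)
    );
}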
4z3imp zi1na 6z5inf 6z5inni zin6s5er + 8zinsuf zist5r zi5th zi1tr 6z1j 2z1k 2z1l 2z1m 6z1n 1zo zo6gl 4z3oh + zo1on zor6na8 4z1p z5q 6z1r 2z1s8 2z1t z4t3end z4t3hei z8thi 1zu zu3al + zu1b4 zu1f2 6z5uhr zun2a 8zunem zunf8 8zungl zu1o zup8fi zu1s8 zu1z + 2z1v zw8 z1wal 5zweck zwei3s z1wel z1wer z6werg 8z5wes 1zwi zwi1s + 6z1wo 1zy 2z1z zz8a zzi1s 1z� 1z� 6z�l. z�1le + 1z� 2z1�2b �1a6 �b1l �1che �3chi + �ch8sc �ch8sp �5chu �ck5a �d1a �d5era + �6d5ia �1e �5fa �f1l �ft6s �g1h + �g3le �6g5nan �g5str �1he �1hi �h1le + �h5ne 1�hnl �h1re �h5ri �h1ru �1hu + �h1w 6�i �1isc �6ische �5ism �5j + �1k �l1c �1le �8lei �l6schl �mi1e + �m8n �m8s �5na 5�nderu �ne5i8 �ng3l + �nk5l �1no �n6s5c �1pa �p6s5c 3�q + �r1c �1re �re8m 5�rgern �r6gl �1ri + 3�rmel �1ro �rt6s5 �1ru 3�rztl �5r� + �6s5chen �sen8s �s1th �ta8b �1te �teri4 + �ter5it �6thy �1ti 3�tk �1to �t8schl + �ts1p �5tu �ub1l �u1e 1�ug �u8ga + �u5i �1um. �1us. 1�u� �1z + �1b �1che �5chi + �ch8s2tei + �ch8str �cht6 + 5�6dem 5�ffn �1he �h1l8 �h1re �1hu + �1is �1ke 1�2ko 1�l. �l6k5l �l8pl + �1mu �5na �nig6s3 �1no �5o6t �pf3l + �p6s5c �1re �r8gli �1ri �r8tr �1ru + 5�sterr �1te �5th �1ti �1tu �1v �1w + �we8 �2z �b6e2 3�4ber1 �b1l �b1r + 5�2bu �1che �1chi �8ch3l �ch6s5c �8ck + �ck1a �ck5ers �d1a2 �6deu �di8t �2d1o4 + �d5s6 �ge4l5a �g1l �h5a �1he �8heh + �6h5erk �h1le �h1re �h1ru �1hu �h1w + �3k �1le �l4l5a �l8lo �l4ps �l6s5c + �1lu �n8da �n8fei �nk5l �n8za �n6zw + �5pi �1re �8rei �r8fl �r8fr �r8geng + �1ri �1ro �r8sta + + �1ru �se8n + �8sta �8stes + + �3ta �1te �1ti + �t8tr �1tu �t8zei �1v �1a8 5�a. + �8as �1b8 �1c �1d + 1�e �5ec 8�e8g 8�e8h + 2�1ei 8�em �1f8 �1g �1h + 1�i �1k �1l �1m + + �1n �1o �1p8 �5q + �1r �1s2 �st8 �1ta + �1te �t3hei �1ti �5to + �1tr 1�u8 6�5um �1v �1w + �1z + + 2s1ta. + i2s1tal + 2s1tani 2s1tan. + fe2s1ta + te2s1ta + + nd2ste + ve2ste + 3s2tec + 4s3techn + 3s2teg + 3s2teh + 3s2tein 3s2teig 3s2teif + 3s2tell 3s2telz + a4s3tel + 3s2temm + 3s2temp + 3s2tep + s3s2ter t3s2tern + 3s2teue + 6s4teuro + + bs2ti + te2s3ti + ve2sti + 3s2tic + + 3s2tieb + 3s2tieg + + 3s2tif + 3s2til + 3s2tim + 3s2tink + 3s2titu + + a2s1to + gu2s1to + ku2s1to + i2s1tol i2s1tor + ve2s1to + + 2s1tung + 2s7tus + o2s1tul + + + + aus3s4 + ens3s4 + gs3s4 + .mis2s1 + s2s1b8 + + s2s3chen + s2s3d + s2s5ec + + + 2s2s1ei + s2s3f + s2s1g + s2s3h + s2s3k + s2s3l + s2s3m + + s2s3n + s2s3p8 + s2s5q + s2s3r + s2s3s2 + sss2t8 + + + as2s3te + is2s3te + us2s3te + �s2s3te + s2st3hei + s2s3ti + s2s1to + s2s1tr + + 6ss5um + s2s3v + s2s3w + s2s3z + + + + 1cker. + 1ckert + 1ckad + 1cke. + 1ckel + 1cken + 4ck1ent + 1ckere + 1ckern + 1ckeru + 1ckie + 1ckig + 1ckun + + + diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index b6fc1e3722ccd..b017ae9921b0e 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.test-with-dependencies' diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java new file mode 100644 index 0000000000000..fdc96892d4b27 --- /dev/null +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class ReindexDataStreamTransportActionIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(DataStreamsPlugin.class); + } + + public void testNonExistentDataStream() { + String nonExistentDataStreamName = randomAlphaOfLength(50); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(nonExistentDataStreamName); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME), reindexDataStreamRequest) + .actionGet() + ); + } + + public void testAlreadyUpToDateDataStream() throws Exception { + String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(dataStreamName); + createDataStream(dataStreamName); + ReindexDataStreamResponse response = client().execute( + new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME), + reindexDataStreamRequest + ).actionGet(); + String persistentTaskId = response.getTaskId(); + assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName)); + AtomicReference<ReindexDataStreamTask> runningTask = new AtomicReference<>(); + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + TaskManager taskManager =
transportService.getTaskManager(); + Map<Long, CancellableTask> tasksMap = taskManager.getCancellableTasks(); + Optional<Map.Entry<Long, CancellableTask>> optionalTask = taskManager.getCancellableTasks() + .entrySet() + .stream() + .filter(entry -> entry.getValue().getType().equals("persistent")) + .filter( + entry -> entry.getValue() instanceof ReindexDataStreamTask + && persistentTaskId.equals((((ReindexDataStreamTask) entry.getValue()).getPersistentTaskId())) + ) + .findAny(); + optionalTask.ifPresent( + longCancellableTaskEntry -> runningTask.compareAndSet(null, (ReindexDataStreamTask) longCancellableTaskEntry.getValue()) + ); + } + ReindexDataStreamTask task = runningTask.get(); + assertNotNull(task); + assertThat(task.getStatus().complete(), equalTo(true)); + assertNull(task.getStatus().exception()); + assertThat(task.getStatus().pending(), equalTo(0)); + assertThat(task.getStatus().inProgress(), equalTo(0)); + assertThat(task.getStatus().errors().size(), equalTo(0)); + } + + private void createDataStream(String dataStreamName) { + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request("my-template"); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .template(Template.builder().build()) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = safeGet( + client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest) + ); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + dataStreamName + ); + final AcknowledgedResponse createDataStreamResponse = safeGet( + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + ); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + } + + private void indexDocs(String dataStreamName) { + int docs = randomIntBetween(5, 10); + CountDownLatch countDownLatch = new CountDownLatch(docs); + for (int i = 0; i < docs; i++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = "{ \"@timestamp\": \"2099-05-06T16:21:15.000Z\", \"message\": \"something cool happened\" }"; + indexRequest.source(doc, XContentType.JSON); + client().index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(DocWriteResponse docWriteResponse) { + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail("Indexing request should have succeeded eventually, failed with " + e.getMessage()); + } + }); + } + safeAwait(countDownLatch); + } + +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java index 01c63be448e62..2dc6fd84fdfe7 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java +++
b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java @@ -45,6 +45,7 @@ public abstract class AbstractDataStreamIT extends ESRestTestCase { // tests such as testIgnoreDynamicBeyondLimit. .setting("xpack.apm_data.enabled", "false") .setting("xpack.otel_data.registry.enabled", "false") + .setting("cluster.logsdb.enabled", "false") .build(); protected RestClient client; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index f60a3e5c47a7f..f090186480b76 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -9,7 +9,6 @@ package org.elasticsearch.datastreams; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; @@ -17,7 +16,6 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; /** @@ -25,14 +23,8 @@ */ public class DataStreamFeatures implements FeatureSpecification { - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(DATA_STREAM_LIFECYCLE, Version.V_8_11_0); - } - @Override public Set<NodeFeature> getFeatures() { return Set.of( diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cb7445705537a..2f3b63d27ca35 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -19,19 +19,23 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import
org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.action.CreateDataStreamTransportAction; @@ -40,6 +44,7 @@ import org.elasticsearch.datastreams.action.MigrateToDataStreamTransportAction; import org.elasticsearch.datastreams.action.ModifyDataStreamsTransportAction; import org.elasticsearch.datastreams.action.PromoteDataStreamTransportAction; +import org.elasticsearch.datastreams.action.ReindexDataStreamTransportAction; import org.elasticsearch.datastreams.action.TransportGetDataStreamsAction; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; @@ -73,14 +78,27 @@ import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction; import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction; import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskExecutor; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskState; +import org.elasticsearch.datastreams.task.ReindexDataStreamStatus; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.time.Clock; @@ -93,7 +111,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; -public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin { +public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin, PersistentTaskPlugin { public static final Setting<TimeValue> TIME_SERIES_POLL_INTERVAL = Setting.timeSetting( "time_series.poll_interval", @@ -244,6 +262,7 @@ public Collection<?> createComponents(PluginServices services) { actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); } + actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); return actions; } @@ -302,4 +321,48 @@ public void close() throws IOException { public Collection<HealthIndicatorService> getHealthIndicatorServices() { return List.of(dataStreamLifecycleHealthIndicatorService.get()); } + + @Override + public List<NamedXContentRegistry.Entry> getNamedXContent() { + return List.of( + new NamedXContentRegistry.Entry( + PersistentTaskState.class, + new ParseField(ReindexDataStreamPersistentTaskState.NAME), + ReindexDataStreamPersistentTaskState::fromXContent + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new
ParseField(ReindexDataStreamTaskParams.NAME), + ReindexDataStreamTaskParams::fromXContent + ) + ); + } + + @Override + public List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry( + PersistentTaskState.class, + ReindexDataStreamPersistentTaskState.NAME, + ReindexDataStreamPersistentTaskState::new + ), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + ReindexDataStreamTaskParams.NAME, + ReindexDataStreamTaskParams::new + ), + new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) + ); + } + + @Override + public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { + return List.of(new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool)); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java new file mode 100644 index 0000000000000..0a86985c6c7b2 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/* + * This transport action creates a new persistent task for reindexing the source data stream given in the request. On successful creation + * of the persistent task, it responds with the persistent task id so that the user can monitor the persistent task.
+ */ +public class ReindexDataStreamTransportAction extends HandledTransportAction<ReindexDataStreamRequest, ReindexDataStreamResponse> { + private final PersistentTasksService persistentTasksService; + private final TransportService transportService; + private final ClusterService clusterService; + + @Inject + public ReindexDataStreamTransportAction( + TransportService transportService, + ActionFilters actionFilters, + PersistentTasksService persistentTasksService, + ClusterService clusterService + ) { + super( + ReindexDataStreamAction.NAME, + true, + transportService, + actionFilters, + ReindexDataStreamRequest::new, + transportService.getThreadPool().executor(ThreadPool.Names.GENERIC) + ); + this.transportService = transportService; + this.persistentTasksService = persistentTasksService; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener<ReindexDataStreamResponse> listener) { + String sourceDataStreamName = request.getSourceDataStream(); + Metadata metadata = clusterService.state().metadata(); + DataStream dataStream = metadata.dataStreams().get(sourceDataStreamName); + if (dataStream == null) { + listener.onFailure(new ResourceNotFoundException("Data stream named [{}] does not exist", sourceDataStreamName)); + return; + } + int totalIndices = dataStream.getIndices().size(); + int totalIndicesToBeUpgraded = (int) dataStream.getIndices() + .stream() + .filter(index -> metadata.index(index).getCreationVersion().isLegacyIndexVersion()) + .count(); + ReindexDataStreamTaskParams params = new ReindexDataStreamTaskParams( + sourceDataStreamName, + transportService.getThreadPool().absoluteTimeInMillis(), + totalIndices, + totalIndicesToBeUpgraded + ); + String persistentTaskId = getPersistentTaskId(sourceDataStreamName); + persistentTasksService.sendStartRequest( + persistentTaskId, + ReindexDataStreamTask.TASK_NAME, + params, + null, + ActionListener.wrap(startedTask -> listener.onResponse(new ReindexDataStreamResponse(persistentTaskId)), listener::onFailure) + ); + } + + private String getPersistentTaskId(String dataStreamName) throws ResourceAlreadyExistsException { + return "reindex-data-stream-" + dataStreamName; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java new file mode 100644 index 0000000000000..f10d2e7b356fb --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExecutor<ReindexDataStreamTaskParams> { + private static final TimeValue TASK_KEEP_ALIVE_TIME = TimeValue.timeValueDays(1); + private final Client client; + private final ClusterService clusterService; + private final ThreadPool threadPool; + + public ReindexDataStreamPersistentTaskExecutor(Client client, ClusterService clusterService, String taskName, ThreadPool threadPool) { + super(taskName, threadPool.generic()); + this.client = client; + this.clusterService = clusterService; + this.threadPool = threadPool; + } + + @Override + protected ReindexDataStreamTask createTask( + long id, + String type, + String action, + TaskId parentTaskId, + PersistentTasksCustomMetadata.PersistentTask<ReindexDataStreamTaskParams> taskInProgress, + Map<String, String> headers + ) { + ReindexDataStreamTaskParams params = taskInProgress.getParams(); + return new ReindexDataStreamTask( + params.startTime(), + params.totalIndices(), + params.totalIndicesToBeUpgraded(), + threadPool, + id, + type, + action, + "id=" + taskInProgress.getId(), + parentTaskId, + headers + ); + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, ReindexDataStreamTaskParams params, PersistentTaskState state) { + String sourceDataStream = params.getSourceDataStream(); + GetDataStreamAction.Request request = new GetDataStreamAction.Request(TimeValue.MAX_VALUE, new String[] { sourceDataStream }); + assert task instanceof ReindexDataStreamTask; + final ReindexDataStreamTask reindexDataStreamTask = (ReindexDataStreamTask) task; + client.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> { + List<GetDataStreamAction.Response.DataStreamInfo> dataStreamInfos = response.getDataStreams(); + if (dataStreamInfos.size() == 1) { + List<Index> indices = dataStreamInfos.getFirst().getDataStream().getIndices(); + List<Index> indicesToBeReindexed = indices.stream() + .filter(index -> clusterService.state().getMetadata().index(index).getCreationVersion().isLegacyIndexVersion()) + .toList(); + reindexDataStreamTask.setPendingIndices(indicesToBeReindexed.stream().map(Index::getName).toList()); + for (Index index : indicesToBeReindexed) { + // TODO This is just a placeholder.
This is where the real data stream reindex logic will go + } + + completeSuccessfulPersistentTask(reindexDataStreamTask); + } else { + completeFailedPersistentTask(reindexDataStreamTask, new ElasticsearchException("data stream does not exist")); + } + }, reindexDataStreamTask::markAsFailed)); + } + + private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { + persistentTask.reindexSucceeded(); + threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic()); + } + + private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) { + persistentTask.reindexFailed(e); + threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic()); + } + + private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) { + PersistentTasksCustomMetadata persistentTasksCustomMetadata = clusterService.state() + .getMetadata() + .custom(PersistentTasksCustomMetadata.TYPE); + PersistentTasksCustomMetadata.PersistentTask<?> persistentTask = persistentTasksCustomMetadata.getTask( + reindexDataStreamTask.getPersistentTaskId() + ); + PersistentTaskState state = persistentTask.getState(); + final long completionTime; + if (state == null) { + completionTime = threadPool.absoluteTimeInMillis(); + reindexDataStreamTask.updatePersistentTaskState( + new ReindexDataStreamPersistentTaskState(completionTime), + ActionListener.noop() + ); + } else { + completionTime = ((ReindexDataStreamPersistentTaskState) state).completionTime(); + } + return TimeValue.timeValueMillis(TASK_KEEP_ALIVE_TIME.millis() - (threadPool.absoluteTimeInMillis() - completionTime)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java new file mode 100644 index 0000000000000..d6f32a3d34a7a --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamPersistentTaskState(long completionTime) implements Task.Status, PersistentTaskState { + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String COMPLETION_TIME_FIELD = "completion_time"; + private static final ConstructingObjectParser<ReindexDataStreamPersistentTaskState, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamPersistentTaskState((long) args[0]) + ); + static { + PARSER.declareLong(constructorArg(), new ParseField(COMPLETION_TIME_FIELD)); + } + + public ReindexDataStreamPersistentTaskState(StreamInput in) throws IOException { + this(in.readLong()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(completionTime); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COMPLETION_TIME_FIELD, completionTime); + builder.endObject(); + return builder; + } + + public static ReindexDataStreamPersistentTaskState fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java new file mode 100644 index 0000000000000..10dfded853a13 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record ReindexDataStreamStatus( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + boolean complete, + Exception exception, + int inProgress, + int pending, + List<Tuple<String, Exception>> errors +) implements Task.Status { + public ReindexDataStreamStatus { + Objects.requireNonNull(errors); + } + + public static final String NAME = "ReindexDataStreamStatus"; + + public ReindexDataStreamStatus(StreamInput in) throws IOException { + this( + in.readLong(), + in.readInt(), + in.readInt(), + in.readBoolean(), + in.readException(), + in.readInt(), + in.readInt(), + in.readCollectionAsList(in1 -> Tuple.tuple(in1.readString(), in1.readException())) + ); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(persistentTaskStartTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + out.writeBoolean(complete); + out.writeException(exception); + out.writeInt(inProgress); + out.writeInt(pending); + out.writeCollection(errors, (out1, tuple) -> { + out1.writeString(tuple.v1()); + out1.writeException(tuple.v2()); + }); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("start_time", persistentTaskStartTime); + builder.field("complete", complete); + builder.field("total_indices", totalIndices); + builder.field("total_indices_requiring_upgrade", totalIndicesToBeUpgraded); + builder.field("successes", totalIndicesToBeUpgraded - (inProgress + pending + errors.size())); + builder.field("in_progress", inProgress); + builder.field("pending", pending); + builder.startArray("errors"); + for (Tuple<String, Exception> error : errors) { + builder.startObject(); + builder.field("index", error.v1()); + builder.field("message", error.v2().getMessage()); + builder.endObject(); + } + builder.endArray(); + if (exception != null) { + builder.field("exception", exception.getMessage()); + } + builder.endObject(); + return builder; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java new file mode 100644 index 0000000000000..2ae244679659f --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.core.Tuple; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamTask extends AllocatedPersistentTask { + public static final String TASK_NAME = "reindex-data-stream"; + private final long persistentTaskStartTime; + private final int totalIndices; + private final int totalIndicesToBeUpgraded; + private final ThreadPool threadPool; + private boolean complete = false; + private Exception exception; + private List<String> inProgress = new ArrayList<>(); + private List<String> pending = List.of(); + private List<Tuple<String, Exception>> errors = new ArrayList<>(); + + public ReindexDataStreamTask( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + ThreadPool threadPool, + long id, + String type, + String action, + String description, + TaskId parentTask, + Map<String, String> headers + ) { + super(id, type, action, description, parentTask, headers); + this.persistentTaskStartTime = persistentTaskStartTime; + this.totalIndices = totalIndices; + this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded; + this.threadPool = threadPool; + } + + @Override + public ReindexDataStreamStatus getStatus() { + return new ReindexDataStreamStatus( + persistentTaskStartTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress.size(), + pending.size(), + errors + ); + } + + public void reindexSucceeded() { + this.complete = true; + } + + public void reindexFailed(Exception e) { + this.complete = true; + this.exception = e; + } + + public void setInProgressIndices(List<String> inProgressIndices) { + this.inProgress = inProgressIndices; + } + + public void setPendingIndices(List<String> pendingIndices) { + this.pending = pendingIndices; + } + + public void addErrorIndex(String index, Exception error) { + this.errors.add(Tuple.tuple(index, error)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java new file mode 100644 index 0000000000000..5efbc6b672216 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamTaskParams(String sourceDataStream, long startTime, int totalIndices, int totalIndicesToBeUpgraded) + implements + PersistentTaskParams { + + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String SOURCE_DATA_STREAM_FIELD = "source_data_stream"; + private static final String START_TIME_FIELD = "start_time"; + private static final String TOTAL_INDICES_FIELD = "total_indices"; + private static final String TOTAL_INDICES_TO_BE_UPGRADED_FIELD = "total_indices_to_be_upgraded"; + private static final ConstructingObjectParser<ReindexDataStreamTaskParams, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamTaskParams((String) args[0], (long) args[1], (int) args[2], (int) args[3]) + ); + static { + PARSER.declareString(constructorArg(), new ParseField(SOURCE_DATA_STREAM_FIELD)); + PARSER.declareLong(constructorArg(), new ParseField(START_TIME_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_TO_BE_UPGRADED_FIELD)); + } + + public ReindexDataStreamTaskParams(StreamInput in) throws IOException { + this(in.readString(), in.readLong(), in.readInt(), in.readInt()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REINDEX_DATA_STREAMS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(sourceDataStream); + out.writeLong(startTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field(SOURCE_DATA_STREAM_FIELD, sourceDataStream) + .field(START_TIME_FIELD, startTime) + .field(TOTAL_INDICES_FIELD, totalIndices) + .field(TOTAL_INDICES_TO_BE_UPGRADED_FIELD, totalIndicesToBeUpgraded) + .endObject(); + } + + public String getSourceDataStream() { + return sourceDataStream; + } + + public static ReindexDataStreamTaskParams fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java new file mode 100644 index 0000000000000..be11bff131909 --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class ReindexDataStreamPersistentTaskStateTests extends AbstractXContentSerializingTestCase<ReindexDataStreamPersistentTaskState> { + @Override + protected ReindexDataStreamPersistentTaskState doParseInstance(XContentParser parser) throws IOException { + return ReindexDataStreamPersistentTaskState.fromXContent(parser); + } + + @Override + protected Writeable.Reader<ReindexDataStreamPersistentTaskState> instanceReader() { + return ReindexDataStreamPersistentTaskState::new; + } + + @Override + protected ReindexDataStreamPersistentTaskState createTestInstance() { + return new ReindexDataStreamPersistentTaskState(randomNegativeLong()); + } + + @Override + protected ReindexDataStreamPersistentTaskState mutateInstance(ReindexDataStreamPersistentTaskState instance) throws IOException { + return new ReindexDataStreamPersistentTaskState(instance.completionTime() + 1); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java new file mode 100644 index 0000000000000..8f0fabc2ce7ee --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static java.util.Map.entry; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamStatusTests extends AbstractWireSerializingTestCase<ReindexDataStreamStatus> { + + @Override + protected Writeable.Reader<ReindexDataStreamStatus> instanceReader() { + return ReindexDataStreamStatus::new; + } + + @Override + protected ReindexDataStreamStatus createTestInstance() { + return new ReindexDataStreamStatus( + randomLong(), + randomNegativeInt(), + randomNegativeInt(), + randomBoolean(), + nullableTestException(), + randomNegativeInt(), + randomNegativeInt(), + randomErrorList() + ); + } + + private Exception nullableTestException() { + if (randomBoolean()) { + return testException(); + } + return null; + } + + private Exception testException() { + /* + * Unfortunately ElasticsearchException doesn't have an equals and just falls back to Object::equals. So we can't test for equality + * when we're using an exception. So always just use null. + */ + return null; + } + + private List<String> randomList() { + return randomList(0); + } + + private List<String> randomList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50)); + } + + private List<Tuple<String, Exception>> randomErrorList() { + return randomErrorList(0); + } + + private List<Tuple<String, Exception>> randomErrorList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> Tuple.tuple(randomAlphaOfLength(30), testException())); + } + + @Override + protected ReindexDataStreamStatus mutateInstance(ReindexDataStreamStatus instance) throws IOException { + long startTime = instance.persistentTaskStartTime(); + int totalIndices = instance.totalIndices(); + int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded(); + boolean complete = instance.complete(); + Exception exception = instance.exception(); + int inProgress = instance.inProgress(); + int pending = instance.pending(); + List<Tuple<String, Exception>> errors = instance.errors(); + switch (randomIntBetween(0, 6)) { + case 0 -> startTime = randomLong(); + case 1 -> totalIndices = totalIndices + 1; + case 2 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1; + case 3 -> complete = complete == false; + case 4 -> inProgress = inProgress + 1; + case 5 -> pending = pending + 1; + case 6 -> errors = randomErrorList(errors.size() + 1); + default -> throw new UnsupportedOperationException(); + } + return new ReindexDataStreamStatus( + startTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress, + pending, + errors + ); + } + + public void testToXContent() throws IOException { + ReindexDataStreamStatus status = new ReindexDataStreamStatus( + 1234L, + 200, + 100, + true, + new ElasticsearchException("the whole task failed"), + 12, + 8, + List.of( + Tuple.tuple("index7", new ElasticsearchException("index7 failed")), + Tuple.tuple("index8", new ElasticsearchException("index8 " + "failed")) + ) + ); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) {
builder.humanReadable(true); + status.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + Map<String, Object> parserMap = parser.map(); + assertThat( + parserMap, + equalTo( + Map.ofEntries( + entry("start_time", 1234), + entry("total_indices", 200), + entry("total_indices_requiring_upgrade", 100), + entry("complete", true), + entry("exception", "the whole task failed"), + entry("successes", 78), + entry("in_progress", 12), + entry("pending", 8), + entry( + "errors", + List.of( + Map.of("index", "index7", "message", "index7 failed"), + Map.of("index", "index8", "message", "index8 failed") + ) + ) + ) + ) + ); + } + } + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java new file mode 100644 index 0000000000000..55098bf4a68d5 --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamTaskParamsTests extends AbstractXContentSerializingTestCase<ReindexDataStreamTaskParams> { + + @Override + protected Writeable.Reader<ReindexDataStreamTaskParams> instanceReader() { + return ReindexDataStreamTaskParams::new; + } + + @Override + protected ReindexDataStreamTaskParams createTestInstance() { + return new ReindexDataStreamTaskParams(randomAlphaOfLength(50), randomLong(), randomNonNegativeInt(), randomNonNegativeInt()); + } + + @Override + protected ReindexDataStreamTaskParams mutateInstance(ReindexDataStreamTaskParams instance) { + String sourceDataStream = instance.sourceDataStream(); + long startTime = instance.startTime(); + int totalIndices = instance.totalIndices(); + int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded(); + switch (randomIntBetween(0, 3)) { + case 0 -> sourceDataStream = randomAlphaOfLength(50); + case 1 -> startTime = randomLong(); + case 2 -> totalIndices = totalIndices + 1; + case 3 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1; + default -> throw new UnsupportedOperationException(); + } + return new ReindexDataStreamTaskParams(sourceDataStream, startTime, totalIndices, totalIndicesToBeUpgraded); + } + + @Override + protected ReindexDataStreamTaskParams doParseInstance(XContentParser parser) { + return ReindexDataStreamTaskParams.fromXContent(parser); + } + + public void testToXContent() throws IOException { + ReindexDataStreamTaskParams
params = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + params.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + Map<String, Object> parserMap = parser.map(); + assertThat(parserMap.get("source_data_stream"), equalTo(params.sourceDataStream())); + assertThat(((Number) parserMap.get("start_time")).longValue(), equalTo(params.startTime())); + } + } + } +} diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle index 821de8f834a44..8fe2b82fe21fb 100644 --- a/modules/ingest-attachment/build.gradle +++ b/modules/ingest-attachment/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index 8e7d20108a869..29cc6d7184bf2 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 61ca050d91c13..2f96aa3cbc69a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -268,7 +268,7 @@ private static Set<String> pipelinesWithGeoIpProcessor(ClusterState clusterState Set<String> ids = new HashSet<>(); // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph for (PipelineConfiguration configuration : configurations) { - List<Map<String, Object>> processors = (List<Map<String, Object>>) configuration.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); + List<Map<String, Object>> processors = (List<Map<String, Object>>) configuration.getConfig().get(Pipeline.PROCESSORS_KEY); if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) { ids.add(configuration.getId()); } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt index e76db7cfb1d26..5a1d8c002aa17 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt @@ -50,5 +50,7 @@ static_import { double cosineSimilarity(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity double dotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct double hamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to
org.elasticsearch.script.VectorScoreScriptUtils$Hamming + double maxSimDotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimDotProduct + double maxSimInvHamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimInvHamming } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml new file mode 100644 index 0000000000000..caa7c59ab4c42 --- /dev/null +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml @@ -0,0 +1,206 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ multi_dense_vector_script_max_sim ] + test_runner_features: capabilities + reason: "Support for multi dense vector max-sim functions capability required" + - skip: + features: headers + + - do: + indices.create: + index: test-index + body: + settings: + number_of_shards: 1 + mappings: + properties: + vector: + type: multi_dense_vector + dims: 5 + byte_vector: + type: multi_dense_vector + dims: 5 + element_type: byte + bit_vector: + type: multi_dense_vector + dims: 40 + element_type: bit + - do: + index: + index: test-index + id: "1" + body: + vector: [[230.0, 300.33, -34.8988, 15.555, -200.0], [-0.5, 100.0, -13, 14.8, -156.0]] + byte_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + bit_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + + - do: + index: + index: test-index + id: "3" + body: + vector: [[0.5, 111.3, -13.0, 14.8, -156.0]] + byte_vector: [[2, 18, -5, 0, -124]] + bit_vector: [[2, 18, -5, 0, -124]] + + - do: + indices.refresh: {} +--- +"Test max-sim dot product scoring": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 611.316, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 68.90001, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 230, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 33, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 3, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 2, error: 0.01}} + +# doing max-sim dot product with a vector where the stored bit vectors are used as masks + - do: + headers: + 
Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]] + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 190, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 125, error: 0.01}} +--- +"Test max-sim inv hamming scoring": + - skip: + features: close_to + + # inv hamming doesn't apply to float vectors + - do: + catch: bad_request + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index 1b4fd9d52bbaf..55171221396a3 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index a7bdc11e15550..992f39a22b28c 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -7,10 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
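The expected scores in the YAML tests above all follow the standard MaxSim reduction: for each query vector, take the maximum similarity against any of the document's stored vectors, then sum those per-query maxima. For maxSimInvHamming the per-pair similarity is the inverted, normalized Hamming distance, 1 - hamming(q, d) / totalBits; doc 3's byte vector differs from the query [1, 2, 1, 1, 1] in 13 of its 40 bits, which gives exactly the expected 0.675. A reference sketch of the dot-product variant for float vectors (illustrative only, not the MultiVectorScoreScriptUtils implementation):

```java
// MaxSim dot product: sum over query vectors of the best dot product against
// any stored vector. Reproduces the expected values above, e.g. the query
// [[1, 2, 1, 1, 1]] against doc 1's two float vectors yields 611.316.
static double maxSimDotProduct(float[][] query, float[][] stored) {
    double score = 0;
    for (float[] q : query) {
        double best = Double.NEGATIVE_INFINITY;
        for (float[] d : stored) {
            double dot = 0;
            for (int i = 0; i < q.length; i++) {
                dot += q[i] * d[i];
            }
            best = Math.max(best, dot);
        }
        score += best; // one best-match contribution per query vector
    }
    return score;
}
```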
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -23,9 +21,3 @@ restResources { include '_common', 'cluster', 'field_caps', 'nodes', 'indices', 'index', 'search', 'get' } } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java b/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java index b325c81616257..80953af5a4cbb 100644 --- a/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java +++ b/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class MapperExtrasClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -26,4 +28,12 @@ public MapperExtrasClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate t public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("mapper-extras").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index c714e52512b2e..0d34b5f6e3b40 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -6,8 +6,8 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java index 6985f6da98cf1..12489ad37aabd 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.join.aggregations; import org.apache.lucene.search.Query; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -44,7 +45,7 @@ public ChildrenToParentAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalParent( diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 60412179807a5..1b99d2b34046c 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -115,7 +116,7 @@ public void postCollection() throws IOException { } @Override - protected void prepareSubAggs(long[] ordsToCollect) throws IOException { + protected void prepareSubAggs(LongArray ordsToCollect) throws IOException { IndexReader indexReader = searcher().getIndexReader(); for (LeafReaderContext ctx : indexReader.leaves()) { Scorer childDocsScorer = outFilter.scorer(ctx); @@ -153,9 +154,10 @@ public float score() { * structure that maps a primitive long to a list of primitive * longs. 
*/ - for (long owningBucketOrd : ordsToCollect) { - if (collectionStrategy.exists(owningBucketOrd, globalOrdinal)) { - collectBucket(sub, docId, owningBucketOrd); + for (long ord = 0; ord < ordsToCollect.size(); ord++) { + long ordToCollect = ordsToCollect.get(ord); + if (collectionStrategy.exists(ordToCollect, globalOrdinal)) { + collectBucket(sub, docId, ordToCollect); } } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java index d8a061a2de6d9..939107f87715d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.join.aggregations; import org.apache.lucene.search.Query; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -40,7 +41,7 @@ public ParentToChildrenAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalChildren( diff --git a/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java b/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java index 7b02f87691841..ecfc464f0739c 100644 --- a/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java +++ b/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class ParentChildClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,12 @@ public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate te public static Iterable parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("parent-join").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index e7865a086a003..2d2f6767f5e62 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -6,8 +6,8 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
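The long[]-to-LongArray migration above is what forces this loop rewrite: LongArray is a paged BigArrays structure, sized by a long and iterated by index rather than with an enhanced for, and when allocated from a breaker-tracking BigArrays instance its memory is accounted for, unlike a plain long[]. The traversal pattern in isolation (a sketch, not ES source):

```java
// Index-based traversal of a BigArrays-backed LongArray, mirroring the
// rewritten collection loop in ParentJoinAggregator above.
static void forEachOwningOrd(org.elasticsearch.common.util.LongArray ordsToCollect) {
    for (long ord = 0; ord < ordsToCollect.size(); ord++) {
        long owningBucketOrd = ordsToCollect.get(ord);
        // ... collect the current document into owningBucketOrd ...
    }
}
```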
*/ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java b/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java index 21bc2d8a4ae10..d71e758525085 100644 --- a/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java +++ b/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,12 @@ public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate tes public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("percolator").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/rank-eval/build.gradle b/modules/rank-eval/build.gradle index 511dd7be9ae68..c9016798c18b9 100644 --- a/modules/rank-eval/build.gradle +++ b/modules/rank-eval/build.gradle @@ -25,7 +25,3 @@ testClusters.configureEach { // Modules who's integration is explicitly tested in integration tests module ':modules:lang-mustache' } - -tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTest("rank_eval/30_failures/Response format", "warning does not exist for compatibility") -}) diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index bb1500ba55664..7281c161e2c4a 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -10,15 +10,14 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.transform.UnzipTransform apply plugin: 'elasticsearch.test-with-dependencies' apply plugin: 'elasticsearch.jdk-download' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -33,7 +32,6 @@ testClusters.configureEach { module ':modules:rest-root' // Whitelist reindexing from the local node so we can test reindex-from-remote. 
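The parent-join and percolator conversions above, like mapper-extras before them and reindex below, all migrate off the legacy-yaml-rest-test plugins with the same recipe: the suite now owns its test cluster as a JUnit @ClassRule, declaring the module under test (plus any extra modules and settings), and points the YAML runner at that cluster's HTTP addresses. The general shape, with the module name and setting as placeholders:

```java
// Recurring conversion pattern; "my-module" and "some.setting" are placeholders.
public class MyModuleClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    @ClassRule
    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
        .module("my-module")              // module under test
        .setting("some.setting", "value") // optional cluster configuration
        .build();

    public MyModuleClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        return ESClientYamlSuiteTestCase.createParameters();
    }

    @Override
    protected String getTestRestCluster() {
        return cluster.getHttpAddresses();
    }
}
```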
setting 'reindex.remote.whitelist', '127.0.0.1:*' - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } dependencies { @@ -42,6 +40,10 @@ dependencies { // for parent/child testing testImplementation project(':modules:parent-join') testImplementation project(':modules:rest-root') + + clusterModules project(':modules:lang-painless') + clusterModules project(':modules:parent-join') + clusterModules project(":modules:rest-root") } restResources { diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java index 4a001bb2d0969..a4b030e3c793f 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java @@ -35,7 +35,7 @@ * The test works as follows: * 1. Start a large (reasonably long running) reindexing request on the coordinator-only node. * 2. Check that the reindexing task appears on the coordinating node - * 3. With a 10s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, + * 3. With a 60s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, * wait for the reindexing task to complete before closing the node * 4. Confirm that the reindexing task succeeds with the wait (it will fail without it) */ @@ -58,8 +58,9 @@ public void testReindexWithShutdown() throws Exception { final String masterNodeName = internalCluster().startMasterOnlyNode(); final String dataNodeName = internalCluster().startDataOnlyNode(); + /* Maximum time to wait for reindexing tasks to complete before shutdown */ final Settings COORD_SETTINGS = Settings.builder() - .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(10)) + .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(60)) .build(); final String coordNodeName = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); @@ -118,7 +119,7 @@ public void onFailure(Exception e) { internalCluster().stopNode(coordNodeName); } - // Make sure all documents from the source index have been reindexed into the destination index + // Make sure all documents from the source index have been re-indexed into the destination index private void checkDestinationIndex(String dataNodeName, int numDocs) throws Exception { assertTrue(indexExists(DEST_INDEX)); flushAndRefresh(DEST_INDEX); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 3905edae46c2f..a4aa0514bb47a 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -208,7 +208,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -216,7 +216,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -224,7 +224,7 @@ public void 
testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -232,7 +232,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 1ee5519593569..3442e9dc43925 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -218,7 +218,7 @@ public void testMultipleFeatureMigration() throws Exception { // Finally, verify that all the indices exist and have the properties we expect. assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -226,7 +226,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -234,7 +234,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -242,7 +242,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, @@ -251,7 +251,7 @@ public void testMultipleFeatureMigration() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".second-int-man-old-reindexed-for-8", + ".second-int-man-old-reindexed-for-9", SECOND_FEATURE_IDX_FLAG_VALUE, true, true, diff --git a/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java b/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java index 99be7123040cc..a0212a937f27b 100644 --- a/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java +++ b/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java @@ -18,7 +18,6 @@ import static org.hamcrest.CoreMatchers.containsString; public class ReindexWithoutContentIT extends ESRestTestCase { - public void testReindexMissingBody() throws IOException { ResponseException responseException = expectThrows( ResponseException.class, diff --git a/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java b/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java index a44a2150bf07c..0378a63ed5481 100644 --- a/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java +++ b/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import 
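The -reindexed-for-8 to -reindexed-for-9 updates in these migration tests track the convention that system-index migration appends a suffix naming the major version being upgraded to; on a 9.x cluster the migrated copies therefore carry -reindexed-for-9. Roughly, as a hypothetical helper (not the actual SystemIndexMigrator code):

```java
// Hypothetical illustration of the naming convention the assertions rely on.
static String migratedIndexName(String oldIndexName, int upgradedToMajor) {
    return oldIndexName + "-reindexed-for-" + upgradedToMajor;
    // ".int-man-old" -> ".int-man-old-reindexed-for-9"
}
```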
com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,18 @@ public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCa public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("reindex") + .module("lang-painless") + .module("parent-join") + .module("rest-root") + .setting("reindex.remote.whitelist", "127.0.0.1:*") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 4babac68f1e71..8c1ca3891bc1e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -1,8 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin -import org.elasticsearch.gradle.internal.test.RestIntegTestTask - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -11,6 +6,11 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ + +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin +import org.elasticsearch.gradle.internal.test.RestIntegTestTask + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index e049d4cd372e6..61940be247861 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -112,7 +112,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numThrottles + 1) .withThrottles(numThrottles) .withExceptions(numThrottles) @@ -137,7 +137,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics() .withRequests(1) .withThrottles(0) .withExceptions(1) @@ -170,7 +170,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numErrors + 1) .withThrottles(throttles.get()) .withExceptions(numErrors) @@ -191,7 +191,7 @@ public void testRequestFailuresAreCountedInMetrics() { assertThrows(IOException.class, () -> blobContainer.listBlobs(purpose)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, repository).expectMetrics() .withRequests(4) .withThrottles(0) .withExceptions(4) @@ -322,14 +322,20 @@ private void clearMetrics(String discoveryNode) { .forEach(TestTelemetryPlugin::resetMeter); } - private MetricsAsserter metricsAsserter(String dataNodeName, OperationPurpose operationPurpose, AzureBlobStore.Operation operation) { - return new MetricsAsserter(dataNodeName, operationPurpose, operation); + private MetricsAsserter metricsAsserter( + String dataNodeName, + OperationPurpose operationPurpose, + AzureBlobStore.Operation operation, + String repository + ) { + return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository); } private class MetricsAsserter { private final String dataNodeName; private final OperationPurpose purpose; private final AzureBlobStore.Operation operation; + private final String repository; 
enum Result { Success, @@ -355,10 +361,11 @@ List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, Strin abstract List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name); } - private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation) { + private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) { this.dataNodeName = dataNodeName; this.purpose = purpose; this.operation = operation; + this.repository = repository; } private class Expectations { @@ -451,6 +458,7 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa .filter( m -> m.attributes().get("operation").equals(operation.getKey()) && m.attributes().get("purpose").equals(purpose.getKey()) + && m.attributes().get("repo_name").equals(repository) && m.attributes().get("repo_type").equals("azure") ) .findFirst() @@ -462,6 +470,8 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa + operation.getKey() + " and purpose=" + purpose.getKey() + + " and repo_name=" + + repository + " in " + measurements ) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index ab3f3ee4f3728..bd21f208faac4 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -402,7 +402,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation")); final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey( operation, diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index 605d886a71056..811645d154c7a 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -9,7 +9,6 @@ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin import java.nio.file.Files diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index c1cd1a13719a7..1301d17606d63 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -1,7 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
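Both the Azure assertions above and the S3 changes below add a repo_name dimension next to repo_type, operation, and purpose, so measurements can be attributed to one repository when several repositories of the same type are registered. The resulting attribute shape, sketched with an illustrative helper (compare the metricAttributes method in S3BlobContainerRetriesTests further down):

```java
// Illustrative: per-request telemetry attributes after this change.
static Map<String, Object> metricAttributes(String repoName, String operation, String purpose) {
    return Map.of(
        "repo_type", "s3",
        "repo_name", repoName, // new: distinguishes same-type repositories
        "operation", operation,
        "purpose", purpose
    );
}
```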
Licensed under the "Elastic License @@ -10,6 +6,9 @@ import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index d9480abf21687..bb8a452e21771 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -300,7 +300,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation")); final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey( operation, diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java index ead2cb36ad150..dcd29c6d26c6e 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java @@ -51,8 +51,9 @@ protected String getTestRestCluster() { return cluster.getHttpAddresses(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/116811") public void testReloadCredentialsFromKeystore() throws IOException { + assumeFalse("doesn't work in a FIPS JVM, but that's ok", inFipsJvm()); + // Register repository (?verify=false because we don't have access to the blob store yet) final var repositoryName = randomIdentifier(); registerRepository( diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index fde15d5d6e6bc..591350c34ab85 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -318,8 +318,7 @@ class S3Repository extends MeteredBlobStoreRepository { deprecationLogger.critical( DeprecationCategory.SECURITY, "s3_repository_secret_settings", - "Using s3 access/secret key from repository settings. Instead " - + "store these in named clients and the elasticsearch keystore for secure settings." 
+ INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } @@ -336,6 +335,11 @@ class S3Repository extends MeteredBlobStoreRepository { ); } + static final String INSECURE_CREDENTIALS_DEPRECATION_WARNING = Strings.format(""" + This repository's settings include a S3 access key and secret key, but repository settings are stored in plaintext and must not be \ + used for security-sensitive information. Instead, store all secure settings in the keystore. See [%s] for more information.\ + """, ReferenceDocs.SECURE_SETTINGS); + private static Map buildLocation(RepositoryMetadata metadata) { return Map.of("base_path", BASE_PATH_SETTING.get(metadata.settings()), "bucket", BUCKET_SETTING.get(metadata.settings())); } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 7407522651e55..da357dc09ab95 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -327,6 +327,8 @@ private Map metricAttributes(String action) { return Map.of( "repo_type", S3Repository.TYPE, + "repo_name", + blobStore.getRepositoryMetadata().name(), "operation", Operation.GET_OBJECT.getKey(), "purpose", diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 52fe152ba41e3..8e5f6634372db 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -107,10 +107,9 @@ public void testRepositoryCredentialsOverrideSecureCredentials() { assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); assertCriticalWarnings( + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.", "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.", - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings.", - "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release." + S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } @@ -194,10 +193,9 @@ public void testReinitSecureCredentials() { if (hasInsecureSettings) { assertCriticalWarnings( + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.", "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.", - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings.", - "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release." + S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } } @@ -238,10 +236,7 @@ public void sendResponse(RestResponse response) { throw error.get(); } - assertWarnings( - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings." 
- ); + assertWarnings(S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING); } private void createRepository(final String name, final Settings repositorySettings) { diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index ac49cffc1e0da..b292dc5872994 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -1106,7 +1106,7 @@ private List<Measurement> getRetryHistogramMeasurements() { } private Map<String, Object> metricAttributes(String action) { - return Map.of("repo_type", "s3", "operation", "GetObject", "purpose", "Indices", "action", action); + return Map.of("repo_type", "s3", "repo_name", "repository", "operation", "GetObject", "purpose", "Indices", "action", action); } /** diff --git a/modules/rest-root/build.gradle b/modules/rest-root/build.gradle index 05a545a1ed671..adb8aeb02863f 100644 --- a/modules/rest-root/build.gradle +++ b/modules/rest-root/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle index e743939cbf79e..e8e06f0a9c4c7 100644 --- a/modules/runtime-fields-common/build.gradle +++ b/modules/runtime-fields-common/build.gradle @@ -22,7 +22,3 @@ dependencies { api project(':libs:grok') api project(':libs:dissect') } - -tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTestsByFilePattern("**/runtime_fields/110_composite.yml", "warning does not exist for compatibility") -}) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 3095139ca4685..4bb27af4bd0f5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -699,11 +699,6 @@ public Collection<RestHandler> getRestHandlers( Predicate<NodeFeature> clusterSupportsFeature ) { return List.of(new BaseRestHandler() { - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public String getName() { return ROUTE; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java new file mode 100644 index 0000000000000..18c91068ff4f9 --- /dev/null +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.HttpResponseStatus; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class Netty4TrashingAllocatorIT extends ESNetty4IntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.concatLists(List.of(Handler.class), super.nodePlugins()); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testTrashContent() throws InterruptedException { + try (var client = new Netty4HttpClient()) { + var addr = randomFrom(internalCluster().getInstance(HttpServerTransport.class).boundAddress().boundAddresses()).address(); + var content = randomAlphaOfLength(between(1024, 2048)); + var responses = client.post(addr, List.of(new Tuple<>(Handler.ROUTE, content))); + assertEquals(HttpResponseStatus.OK, responses.stream().findFirst().get().status()); + } + } + + public static class Handler extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/trashing-alloc"; + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return ROUTE; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + var content = request.releasableContent(); + var iter = content.iterator(); + return (chan) -> { + request.getHttpRequest().release(); + assertFalse(content.hasReferences()); + BytesRef br; + while ((br 
= iter.next()) != null) { + for (int i = br.offset; i < br.offset + br.length; i++) { + if (br.bytes[i] != 0) { + fail( + new AssertionError( + "buffer is not trashed, off=" + + br.offset + + " len=" + + br.length + + " pos=" + + i + + " ind=" + + (i - br.offset) + ) + ); + } + } + } + chan.sendResponse(new RestResponse(RestStatus.OK, "")); + }; + } + }); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index a1aa211814520..2662ddf7e1440 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.http.netty4; -import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.EmptyHttpHeaders; @@ -128,39 +127,6 @@ public void release() { } } - @Override - public HttpRequest releaseAndCopy() { - assert released.get() == false; - if (pooled == false) { - return this; - } - try { - final ByteBuf copiedContent = Unpooled.copiedBuffer(request.content()); - HttpBody newContent; - if (content.isStream()) { - newContent = content; - } else { - newContent = Netty4Utils.fullHttpBodyFrom(copiedContent); - } - return new Netty4HttpRequest( - sequence, - new DefaultFullHttpRequest( - request.protocolVersion(), - request.method(), - request.uri(), - copiedContent, - request.headers(), - request.trailingHeaders() - ), - new AtomicBoolean(false), - false, - newContent - ); - } finally { - release(); - } - } - @Override public final Map> getHeaders() { return headers; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 459b6c77be8c3..81b4fd3fbb9ee 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -179,7 +179,7 @@ public boolean hasReferences() { } public static HttpBody.Full fullHttpBodyFrom(final ByteBuf buf) { - return new HttpBody.ByteRefHttpBody(toBytesReference(buf)); + return new HttpBody.ByteRefHttpBody(toReleasableBytesReference(buf)); } public static Recycler createRecycler(Settings settings) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index ab38b5f0c4c8c..1eb7e13889338 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -24,9 +24,11 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Booleans; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; public class NettyAllocator { @@ -44,8 +46,9 @@ public class NettyAllocator { private static final String USE_NETTY_DEFAULT_CHUNK = 
"es.unsafe.use_netty_default_chunk_and_page_size"; static { + ByteBufAllocator allocator; if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) { - ALLOCATOR = ByteBufAllocator.DEFAULT; + allocator = ByteBufAllocator.DEFAULT; SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + ByteSizeValue.ofBytes(SUGGESTED_MAX_ALLOCATION_SIZE) @@ -127,7 +130,12 @@ public class NettyAllocator { + g1gcRegionSize + "}]"; } - ALLOCATOR = new NoDirectBuffers(delegate); + allocator = new NoDirectBuffers(delegate); + } + if (Assertions.ENABLED) { + ALLOCATOR = new TrashingByteBufAllocator(allocator); + } else { + ALLOCATOR = allocator; } RECYCLER = new Recycler<>() { @@ -353,4 +361,105 @@ public ByteBufAllocator getDelegate() { return delegate; } } + + static class TrashingByteBuf extends WrappedByteBuf { + + private boolean trashed = false; + + protected TrashingByteBuf(ByteBuf buf) { + super(buf); + } + + @Override + public boolean release() { + if (refCnt() == 1) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(); + } + + @Override + public boolean release(int decrement) { + if (refCnt() == decrement && refCnt() > 0) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(decrement); + } + + // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref + // count to zero, which looks racy because in principle a concurrent caller could come along + // and successfully retain() this buffer to keep it alive after it's been trashed. Such a + // caller would sometimes get an IllegalReferenceCountException ofc but that's something it + // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. + // Yet in practice this should never happen, we only ever retain() these buffers while we + // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd + // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose + // contents we've already decided to trash. 
+ private void trashContent() { + if (trashed == false) { + trashed = true; + TrashingByteBufAllocator.trashBuffer(buf); + } + } + } + + static class TrashingCompositeByteBuf extends CompositeByteBuf { + + TrashingCompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { + super(alloc, direct, maxNumComponents); + } + + @Override + protected void deallocate() { + TrashingByteBufAllocator.trashBuffer(this); + super.deallocate(); + } + } + + static class TrashingByteBufAllocator extends NoDirectBuffers { + + static int DEFAULT_MAX_COMPONENTS = 16; + + static void trashBuffer(ByteBuf buf) { + for (var nioBuf : buf.nioBuffers()) { + if (nioBuf.hasArray()) { + var from = nioBuf.arrayOffset() + nioBuf.position(); + var to = from + nioBuf.remaining(); + Arrays.fill(nioBuf.array(), from, to, (byte) 0); + } + } + } + + TrashingByteBufAllocator(ByteBufAllocator delegate) { + super(delegate); + } + + @Override + public ByteBuf heapBuffer() { + return new TrashingByteBuf(super.heapBuffer()); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity)); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity, int maxCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity, maxCapacity)); + } + + @Override + public CompositeByteBuf compositeHeapBuffer() { + return new TrashingCompositeByteBuf(this, false, DEFAULT_MAX_COMPONENTS); + } + + @Override + public CompositeByteBuf compositeHeapBuffer(int maxNumComponents) { + return new TrashingCompositeByteBuf(this, false, maxNumComponents); + } + + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java new file mode 100644 index 0000000000000..50841cec000f1 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java @@ -0,0 +1,1036 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.util.ByteProcessor; +import io.netty.util.internal.ObjectUtil; +import io.netty.util.internal.StringUtil; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.FileChannel; +import java.nio.channels.GatheringByteChannel; +import java.nio.channels.ScatteringByteChannel; +import java.nio.charset.Charset; + +/** + * A copy of Netty's WrappedByteBuf. 
+ */ +class WrappedByteBuf extends ByteBuf { + + protected final ByteBuf buf; + + protected WrappedByteBuf(ByteBuf buf) { + this.buf = ObjectUtil.checkNotNull(buf, "buf"); + } + + @Override + public final boolean hasMemoryAddress() { + return buf.hasMemoryAddress(); + } + + @Override + public boolean isContiguous() { + return buf.isContiguous(); + } + + @Override + public final long memoryAddress() { + return buf.memoryAddress(); + } + + @Override + public final int capacity() { + return buf.capacity(); + } + + @Override + public ByteBuf capacity(int newCapacity) { + buf.capacity(newCapacity); + return this; + } + + @Override + public final int maxCapacity() { + return buf.maxCapacity(); + } + + @Override + public final ByteBufAllocator alloc() { + return buf.alloc(); + } + + @Override + public final ByteOrder order() { + return buf.order(); + } + + @Override + public ByteBuf order(ByteOrder endianness) { + return buf.order(endianness); + } + + @Override + public final ByteBuf unwrap() { + return buf; + } + + @Override + public ByteBuf asReadOnly() { + return buf.asReadOnly(); + } + + @Override + public boolean isReadOnly() { + return buf.isReadOnly(); + } + + @Override + public final boolean isDirect() { + return buf.isDirect(); + } + + @Override + public final int readerIndex() { + return buf.readerIndex(); + } + + @Override + public final ByteBuf readerIndex(int readerIndex) { + buf.readerIndex(readerIndex); + return this; + } + + @Override + public final int writerIndex() { + return buf.writerIndex(); + } + + @Override + public final ByteBuf writerIndex(int writerIndex) { + buf.writerIndex(writerIndex); + return this; + } + + @Override + public ByteBuf setIndex(int readerIndex, int writerIndex) { + buf.setIndex(readerIndex, writerIndex); + return this; + } + + @Override + public final int readableBytes() { + return buf.readableBytes(); + } + + @Override + public final int writableBytes() { + return buf.writableBytes(); + } + + @Override + public final int maxWritableBytes() { + return buf.maxWritableBytes(); + } + + @Override + public int maxFastWritableBytes() { + return buf.maxFastWritableBytes(); + } + + @Override + public final boolean isReadable() { + return buf.isReadable(); + } + + @Override + public final boolean isWritable() { + return buf.isWritable(); + } + + @Override + public final ByteBuf clear() { + buf.clear(); + return this; + } + + @Override + public final ByteBuf markReaderIndex() { + buf.markReaderIndex(); + return this; + } + + @Override + public final ByteBuf resetReaderIndex() { + buf.resetReaderIndex(); + return this; + } + + @Override + public final ByteBuf markWriterIndex() { + buf.markWriterIndex(); + return this; + } + + @Override + public final ByteBuf resetWriterIndex() { + buf.resetWriterIndex(); + return this; + } + + @Override + public ByteBuf discardReadBytes() { + buf.discardReadBytes(); + return this; + } + + @Override + public ByteBuf discardSomeReadBytes() { + buf.discardSomeReadBytes(); + return this; + } + + @Override + public ByteBuf ensureWritable(int minWritableBytes) { + buf.ensureWritable(minWritableBytes); + return this; + } + + @Override + public int ensureWritable(int minWritableBytes, boolean force) { + return buf.ensureWritable(minWritableBytes, force); + } + + @Override + public boolean getBoolean(int index) { + return buf.getBoolean(index); + } + + @Override + public byte getByte(int index) { + return buf.getByte(index); + } + + @Override + public short getUnsignedByte(int index) { + return buf.getUnsignedByte(index); + } + + 
@Override + public short getShort(int index) { + return buf.getShort(index); + } + + @Override + public short getShortLE(int index) { + return buf.getShortLE(index); + } + + @Override + public int getUnsignedShort(int index) { + return buf.getUnsignedShort(index); + } + + @Override + public int getUnsignedShortLE(int index) { + return buf.getUnsignedShortLE(index); + } + + @Override + public int getMedium(int index) { + return buf.getMedium(index); + } + + @Override + public int getMediumLE(int index) { + return buf.getMediumLE(index); + } + + @Override + public int getUnsignedMedium(int index) { + return buf.getUnsignedMedium(index); + } + + @Override + public int getUnsignedMediumLE(int index) { + return buf.getUnsignedMediumLE(index); + } + + @Override + public int getInt(int index) { + return buf.getInt(index); + } + + @Override + public int getIntLE(int index) { + return buf.getIntLE(index); + } + + @Override + public long getUnsignedInt(int index) { + return buf.getUnsignedInt(index); + } + + @Override + public long getUnsignedIntLE(int index) { + return buf.getUnsignedIntLE(index); + } + + @Override + public long getLong(int index) { + return buf.getLong(index); + } + + @Override + public long getLongLE(int index) { + return buf.getLongLE(index); + } + + @Override + public char getChar(int index) { + return buf.getChar(index); + } + + @Override + public float getFloat(int index) { + return buf.getFloat(index); + } + + @Override + public double getDouble(int index) { + return buf.getDouble(index); + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int length) { + buf.getBytes(index, dst, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuffer dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException { + buf.getBytes(index, out, length); + return this; + } + + @Override + public int getBytes(int index, GatheringByteChannel out, int length) throws IOException { + return buf.getBytes(index, out, length); + } + + @Override + public int getBytes(int index, FileChannel out, long position, int length) throws IOException { + return buf.getBytes(index, out, position, length); + } + + @Override + public CharSequence getCharSequence(int index, int length, Charset charset) { + return buf.getCharSequence(index, length, charset); + } + + @Override + public ByteBuf setBoolean(int index, boolean value) { + buf.setBoolean(index, value); + return this; + } + + @Override + public ByteBuf setByte(int index, int value) { + buf.setByte(index, value); + return this; + } + + @Override + public ByteBuf setShort(int index, int value) { + buf.setShort(index, value); + return this; + } + + @Override + public ByteBuf setShortLE(int index, int value) { + buf.setShortLE(index, value); + return this; + } + + @Override + public ByteBuf setMedium(int index, int value) { + buf.setMedium(index, value); + return this; + } + + @Override + public 
ByteBuf setMediumLE(int index, int value) { + buf.setMediumLE(index, value); + return this; + } + + @Override + public ByteBuf setInt(int index, int value) { + buf.setInt(index, value); + return this; + } + + @Override + public ByteBuf setIntLE(int index, int value) { + buf.setIntLE(index, value); + return this; + } + + @Override + public ByteBuf setLong(int index, long value) { + buf.setLong(index, value); + return this; + } + + @Override + public ByteBuf setLongLE(int index, long value) { + buf.setLongLE(index, value); + return this; + } + + @Override + public ByteBuf setChar(int index, int value) { + buf.setChar(index, value); + return this; + } + + @Override + public ByteBuf setFloat(int index, float value) { + buf.setFloat(index, value); + return this; + } + + @Override + public ByteBuf setDouble(int index, double value) { + buf.setDouble(index, value); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int length) { + buf.setBytes(index, src, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuffer src) { + buf.setBytes(index, src); + return this; + } + + @Override + public int setBytes(int index, InputStream in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, FileChannel in, long position, int length) throws IOException { + return buf.setBytes(index, in, position, length); + } + + @Override + public ByteBuf setZero(int index, int length) { + buf.setZero(index, length); + return this; + } + + @Override + public int setCharSequence(int index, CharSequence sequence, Charset charset) { + return buf.setCharSequence(index, sequence, charset); + } + + @Override + public boolean readBoolean() { + return buf.readBoolean(); + } + + @Override + public byte readByte() { + return buf.readByte(); + } + + @Override + public short readUnsignedByte() { + return buf.readUnsignedByte(); + } + + @Override + public short readShort() { + return buf.readShort(); + } + + @Override + public short readShortLE() { + return buf.readShortLE(); + } + + @Override + public int readUnsignedShort() { + return buf.readUnsignedShort(); + } + + @Override + public int readUnsignedShortLE() { + return buf.readUnsignedShortLE(); + } + + @Override + public int readMedium() { + return buf.readMedium(); + } + + @Override + public int readMediumLE() { + return buf.readMediumLE(); + } + + @Override + public int readUnsignedMedium() { + return buf.readUnsignedMedium(); + } + + @Override + public int readUnsignedMediumLE() { + return buf.readUnsignedMediumLE(); + } + + @Override + public int readInt() { + return buf.readInt(); + } + + @Override + public int readIntLE() { + return buf.readIntLE(); + } + + @Override + public long readUnsignedInt() { + return buf.readUnsignedInt(); + } + + @Override + public long 
readUnsignedIntLE() { + return buf.readUnsignedIntLE(); + } + + @Override + public long readLong() { + return buf.readLong(); + } + + @Override + public long readLongLE() { + return buf.readLongLE(); + } + + @Override + public char readChar() { + return buf.readChar(); + } + + @Override + public float readFloat() { + return buf.readFloat(); + } + + @Override + public double readDouble() { + return buf.readDouble(); + } + + @Override + public ByteBuf readBytes(int length) { + return buf.readBytes(length); + } + + @Override + public ByteBuf readSlice(int length) { + return buf.readSlice(length); + } + + @Override + public ByteBuf readRetainedSlice(int length) { + return buf.readRetainedSlice(length); + } + + @Override + public ByteBuf readBytes(ByteBuf dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int length) { + buf.readBytes(dst, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuffer dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(OutputStream out, int length) throws IOException { + buf.readBytes(out, length); + return this; + } + + @Override + public int readBytes(GatheringByteChannel out, int length) throws IOException { + return buf.readBytes(out, length); + } + + @Override + public int readBytes(FileChannel out, long position, int length) throws IOException { + return buf.readBytes(out, position, length); + } + + @Override + public CharSequence readCharSequence(int length, Charset charset) { + return buf.readCharSequence(length, charset); + } + + @Override + public ByteBuf skipBytes(int length) { + buf.skipBytes(length); + return this; + } + + @Override + public ByteBuf writeBoolean(boolean value) { + buf.writeBoolean(value); + return this; + } + + @Override + public ByteBuf writeByte(int value) { + buf.writeByte(value); + return this; + } + + @Override + public ByteBuf writeShort(int value) { + buf.writeShort(value); + return this; + } + + @Override + public ByteBuf writeShortLE(int value) { + buf.writeShortLE(value); + return this; + } + + @Override + public ByteBuf writeMedium(int value) { + buf.writeMedium(value); + return this; + } + + @Override + public ByteBuf writeMediumLE(int value) { + buf.writeMediumLE(value); + return this; + } + + @Override + public ByteBuf writeInt(int value) { + buf.writeInt(value); + return this; + } + + @Override + public ByteBuf writeIntLE(int value) { + buf.writeIntLE(value); + return this; + } + + @Override + public ByteBuf writeLong(long value) { + buf.writeLong(value); + return this; + } + + @Override + public ByteBuf writeLongLE(long value) { + buf.writeLongLE(value); + return this; + } + + @Override + public ByteBuf writeChar(int value) { + buf.writeChar(value); + return this; + } + + @Override + public ByteBuf writeFloat(float value) { + buf.writeFloat(value); + return this; + } + + @Override + public ByteBuf writeDouble(double value) { + buf.writeDouble(value); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int length) { + 
buf.writeBytes(src, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuffer src) { + buf.writeBytes(src); + return this; + } + + @Override + public int writeBytes(InputStream in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(ScatteringByteChannel in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(FileChannel in, long position, int length) throws IOException { + return buf.writeBytes(in, position, length); + } + + @Override + public ByteBuf writeZero(int length) { + buf.writeZero(length); + return this; + } + + @Override + public int writeCharSequence(CharSequence sequence, Charset charset) { + return buf.writeCharSequence(sequence, charset); + } + + @Override + public int indexOf(int fromIndex, int toIndex, byte value) { + return buf.indexOf(fromIndex, toIndex, value); + } + + @Override + public int bytesBefore(byte value) { + return buf.bytesBefore(value); + } + + @Override + public int bytesBefore(int length, byte value) { + return buf.bytesBefore(length, value); + } + + @Override + public int bytesBefore(int index, int length, byte value) { + return buf.bytesBefore(index, length, value); + } + + @Override + public int forEachByte(ByteProcessor processor) { + return buf.forEachByte(processor); + } + + @Override + public int forEachByte(int index, int length, ByteProcessor processor) { + return buf.forEachByte(index, length, processor); + } + + @Override + public int forEachByteDesc(ByteProcessor processor) { + return buf.forEachByteDesc(processor); + } + + @Override + public int forEachByteDesc(int index, int length, ByteProcessor processor) { + return buf.forEachByteDesc(index, length, processor); + } + + @Override + public ByteBuf copy() { + return buf.copy(); + } + + @Override + public ByteBuf copy(int index, int length) { + return buf.copy(index, length); + } + + @Override + public ByteBuf slice() { + return buf.slice(); + } + + @Override + public ByteBuf retainedSlice() { + return buf.retainedSlice(); + } + + @Override + public ByteBuf slice(int index, int length) { + return buf.slice(index, length); + } + + @Override + public ByteBuf retainedSlice(int index, int length) { + return buf.retainedSlice(index, length); + } + + @Override + public ByteBuf duplicate() { + return buf.duplicate(); + } + + @Override + public ByteBuf retainedDuplicate() { + return buf.retainedDuplicate(); + } + + @Override + public int nioBufferCount() { + return buf.nioBufferCount(); + } + + @Override + public ByteBuffer nioBuffer() { + return buf.nioBuffer(); + } + + @Override + public ByteBuffer nioBuffer(int index, int length) { + return buf.nioBuffer(index, length); + } + + @Override + public ByteBuffer[] nioBuffers() { + return buf.nioBuffers(); + } + + @Override + public ByteBuffer[] nioBuffers(int index, int length) { + return buf.nioBuffers(index, length); + } + + @Override + public ByteBuffer internalNioBuffer(int index, int length) { + return buf.internalNioBuffer(index, length); + } + + @Override + public boolean hasArray() { + return buf.hasArray(); + } + + @Override + 
public byte[] array() { + return buf.array(); + } + + @Override + public int arrayOffset() { + return buf.arrayOffset(); + } + + @Override + public String toString(Charset charset) { + return buf.toString(charset); + } + + @Override + public String toString(int index, int length, Charset charset) { + return buf.toString(index, length, charset); + } + + @Override + public int hashCode() { + return buf.hashCode(); + } + + @Override + @SuppressWarnings("EqualsWhichDoesntCheckParameterClass") + public boolean equals(Object obj) { + return buf.equals(obj); + } + + @Override + public int compareTo(ByteBuf buffer) { + return buf.compareTo(buffer); + } + + @Override + public String toString() { + return StringUtil.simpleClassName(this) + '(' + buf.toString() + ')'; + } + + @Override + public ByteBuf retain(int increment) { + buf.retain(increment); + return this; + } + + @Override + public ByteBuf retain() { + buf.retain(); + return this; + } + + @Override + public ByteBuf touch() { + buf.touch(); + return this; + } + + @Override + public ByteBuf touch(Object hint) { + buf.touch(hint); + return this; + } + + @Override + public final boolean isReadable(int size) { + return buf.isReadable(size); + } + + @Override + public final boolean isWritable(int size) { + return buf.isWritable(size); + } + + @Override + public final int refCnt() { + return buf.refCnt(); + } + + @Override + public boolean release() { + return buf.release(); + } + + @Override + public boolean release(int decrement) { + return buf.release(decrement); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java new file mode 100644 index 0000000000000..a76eb9fa4875b --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.Unpooled; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; + +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBuf; +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBufAllocator; + +/** + * Exercises {@link NettyAllocator.TrashingByteBufAllocator}: every heap buffer it hands out must have its backing + * array zeroed out ("trashed") as soon as the buffer is released, which helps surface use-after-release bugs in tests. + */ +public class NettyAllocatorTests extends ESTestCase { + + // Walks every page of the given BytesReference and checks that each byte has been zeroed. + static void assertBufferTrashed(BytesReference bytesRef) throws IOException { + var iter = bytesRef.iterator(); + BytesRef br; + while ((br = iter.next()) != null) { + for (var i = br.offset; i < br.offset + br.length; i++) { + assertEquals("off=" + br.offset + " len=" + br.length + " i=" + i, 0, br.bytes[i]); + } + } + } + + public void testTrashArrayByteBuf() { + var arr = randomByteArrayOfLength(between(1024, 2048)); + var buf = Unpooled.wrappedBuffer(arr); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + var emptyArr = new byte[arr.length]; + assertArrayEquals(emptyArr, arr); + } + + public void testNioBufsTrashingByteBuf() { + var arrCnt = between(1, 16); + var byteArrs = new byte[arrCnt][]; + var byteBufs = new ByteBuffer[arrCnt]; + for (var i = 0; i < arrCnt; i++) { + byteArrs[i] = randomByteArrayOfLength(between(1024, 2048)); + byteBufs[i] = ByteBuffer.wrap(byteArrs[i]); + } + var buf = Unpooled.wrappedBuffer(byteBufs); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + for (int i = 0; i < arrCnt; i++) { + for (int j = 0; j < byteArrs[i].length; j++) { + assertEquals(0, byteArrs[i][j]); + } + } + } + + public void testNioBufOffsetTrashingByteBuf() { + var arr = randomByteArrayOfLength(1024); + var off = 1; + var len = arr.length - 2; + arr[0] = 1; + arr[arr.length - 1] = 1; + var buf = Unpooled.wrappedBuffer(arr, off, len); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + assertEquals(1, arr[0]); + assertEquals(1, arr[arr.length - 1]); + for (int i = 1; i < arr.length - 1; i++) { + assertEquals("at index " + i, 0, arr[i]); + } + } + + public void testTrashingByteBufAllocator() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var size = between(1024 * 1024, 10 * 1024 * 1024); + + // use 3 different heap allocation methods + for (var buf : List.of(alloc.heapBuffer(), alloc.heapBuffer(1024), alloc.heapBuffer(1024, size))) { + buf.writeBytes(randomByteArrayOfLength(size)); + var bytesRef = Netty4Utils.toBytesReference(buf); + buf.release(); + assertBufferTrashed(bytesRef); + } + } + + public void testTrashingCompositeByteBuf() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var compBuf = alloc.compositeHeapBuffer(); + for (var i = 0; i < between(1, 10); i++) { + var buf = alloc.heapBuffer().writeBytes(randomByteArrayOfLength(between(1024, 8192))); + compBuf.addComponent(true, buf); + } + var bytesRef = Netty4Utils.toBytesReference(compBuf); + compBuf.release(); + assertBufferTrashed(bytesRef); + } + +} diff --git a/muted-tests.yml b/muted-tests.yml index 9c56d9c87e3ca..f33ca972b7d36 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,7 +1,4 @@ tests: -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/esql/esql-async-query-api/line_17} - issue: https://github.com/elastic/elasticsearch/issues/109260 - class: 
"org.elasticsearch.client.RestClientSingleHostIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/102717" method: "testRequestResetAndAbort" @@ -14,12 +11,6 @@ tests: - class: org.elasticsearch.smoketest.WatcherYamlRestIT method: test {p0=watcher/usage/10_basic/Test watcher usage stats output} issue: https://github.com/elastic/elasticsearch/issues/112189 -- class: org.elasticsearch.xpack.esql.action.ManyShardsIT - method: testRejection - issue: https://github.com/elastic/elasticsearch/issues/112406 -- class: org.elasticsearch.xpack.esql.action.ManyShardsIT - method: testConcurrentQueries - issue: https://github.com/elastic/elasticsearch/issues/112424 - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111497 - class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests @@ -67,9 +58,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item} issue: https://github.com/elastic/elasticsearch/issues/113325 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated} - issue: https://github.com/elastic/elasticsearch/pull/113286 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests method: testFieldMappings issue: https://github.com/elastic/elasticsearch/issues/113592 @@ -115,9 +103,6 @@ tests: - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT method: testStalledShardMigrationProperlyDetected issue: https://github.com/elastic/elasticsearch/issues/115697 -- class: org.elasticsearch.xpack.inference.InferenceCrudIT - method: testSupportedStream - issue: https://github.com/elastic/elasticsearch/issues/113430 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Verify start transform reuses destination index} issue: https://github.com/elastic/elasticsearch/issues/115808 @@ -133,12 +118,6 @@ tests: - class: org.elasticsearch.search.SearchServiceTests method: testParseSourceValidation issue: https://github.com/elastic/elasticsearch/issues/115936 -- class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT - method: testReindexWithShutdown - issue: https://github.com/elastic/elasticsearch/issues/115996 -- class: org.elasticsearch.search.query.SearchQueryIT - method: testAllDocsQueryString - issue: https://github.com/elastic/elasticsearch/issues/115728 - class: org.elasticsearch.xpack.application.connector.ConnectorIndexServiceTests issue: https://github.com/elastic/elasticsearch/issues/116087 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT @@ -168,9 +147,6 @@ tests: - class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT method: testDeprecatedSettingsReturnWarnings issue: https://github.com/elastic/elasticsearch/issues/108628 -- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT - method: testEveryActionIsEitherOperatorOnlyOrNonOperator - issue: https://github.com/elastic/elasticsearch/issues/102992 - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/116249 @@ -186,8 +162,6 @@ tests: - class: 
org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT method: testILMDownsampleRollingRestart issue: https://github.com/elastic/elasticsearch/issues/114233 -- class: org.elasticsearch.xpack.kql.query.KqlQueryBuilderTests - issue: https://github.com/elastic/elasticsearch/issues/116487 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testInvalidJSON issue: https://github.com/elastic/elasticsearch/issues/116521 @@ -209,9 +183,6 @@ tests: - class: org.elasticsearch.reservedstate.service.RepositoriesFileSettingsIT method: testSettingsApplied issue: https://github.com/elastic/elasticsearch/issues/116694 -- class: org.elasticsearch.snapshots.SnapshotShutdownIT - method: testRestartNodeDuringSnapshot - issue: https://github.com/elastic/elasticsearch/issues/116730 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryGroupsResolverTests issue: https://github.com/elastic/elasticsearch/issues/116182 - class: org.elasticsearch.xpack.test.rest.XPackRestIT @@ -220,37 +191,50 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} issue: https://github.com/elastic/elasticsearch/issues/116777 -- class: org.elasticsearch.repositories.s3.RepositoryS3RestIT - method: testReloadCredentialsFromKeystore - issue: https://github.com/elastic/elasticsearch/issues/116811 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeWithinAggregations - issue: https://github.com/elastic/elasticsearch/issues/116856 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeSingleGrouping - issue: https://github.com/elastic/elasticsearch/issues/116857 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeNestedGrouping - issue: https://github.com/elastic/elasticsearch/issues/116858 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests - method: testFloatGetVectorValueAndGetMagnitude - issue: https://github.com/elastic/elasticsearch/issues/116863 -- class: org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceReconcilerMetricsIT - method: testDesiredBalanceMetrics - issue: https://github.com/elastic/elasticsearch/issues/116870 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} - issue: https://github.com/elastic/elasticsearch/issues/116542 -- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests - method: testTermQuery - issue: https://github.com/elastic/elasticsearch/issues/116879 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - issue: https://github.com/elastic/elasticsearch/issues/116899 +- class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT + method: testQueryBuilderBWC {p0=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116989 +- class: 
org.elasticsearch.upgrades.QueryBuilderBWCIT + method: testQueryBuilderBWC {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116990 +- class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT + method: test {yaml=/10_apm/Test template reinstallation} + issue: https://github.com/elastic/elasticsearch/issues/116445 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testMultipleInferencesTriggeringDownloadAndDeploy + issue: https://github.com/elastic/elasticsearch/issues/117208 +- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT + method: testEsqlSource + issue: https://github.com/elastic/elasticsearch/issues/117212 +- class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT + method: testEnterpriseDownloaderTask + issue: https://github.com/elastic/elasticsearch/issues/115163 +- class: org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT + method: testSeqNoCASLinearizability + issue: https://github.com/elastic/elasticsearch/issues/117249 +- class: org.elasticsearch.discovery.ClusterDisruptionIT + method: testAckedIndexing + issue: https://github.com/elastic/elasticsearch/issues/117024 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} + issue: https://github.com/elastic/elasticsearch/issues/117295 +- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests + method: testRetryPointInTime + issue: https://github.com/elastic/elasticsearch/issues/117116 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultElser + issue: https://github.com/elastic/elasticsearch/issues/114913 +- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT + method: testCancelRequestWhenFailingFetchingPages + issue: https://github.com/elastic/elasticsearch/issues/117397 +- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT + method: testEveryActionIsEitherOperatorOnlyOrNonOperator + issue: https://github.com/elastic/elasticsearch/issues/102992 # Examples: # diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index f9245ed32c325..05cd2cb44124c 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 16786c6c31074..3ec2ec531ae92 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -9,6 +6,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ + +import org.elasticsearch.gradle.LoggedExec + apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a166a89ad4026..980e2467206d7 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -77,6 +75,7 @@ tasks.register("writeTestJavaPolicy") { "permission java.security.SecurityPermission \"getProperty.jdk.tls.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.jdk.certpath.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.keystore.type.compat\";", + "permission java.security.SecurityPermission \"getProperty.org.bouncycastle.ec.max_f2m_field_size\";", "};" ].join("\n") ) diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index aad59be376262..5f0fee6636256 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -8,7 +8,6 @@ */ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin @@ -55,8 +54,9 @@ tasks.named("yamlRestTest").configure { enabled = false } ['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> TaskProvider<AntFixture> fixture = tasks.register("ec2Fixture${action}", AntFixture) { dependsOn project.sourceSets.yamlRestTest.runtimeClasspath - env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" - executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + FileCollection cp = project.sourceSets.yamlRestTest.runtimeClasspath + env 'CLASSPATH', "${-> cp.asPath}" + executable = "${buildParams.runtimeJavaHome.get() }/bin/java" args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt" } @@ -68,9 +68,18 @@ tasks.named("yamlRestTest").configure { enabled = false } classpath = yamlRestTestSourceSet.getRuntimeClasspath() } + if (action == 'ContainerCredentials') { + def addressAndPortSource = fixture.get().addressAndPortSource + testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { + environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + () -> addressAndPortSource.map { addr -> "http://${addr}/ecs_credentials_endpoint" }.get(), IGNORE_VALUE + } + } + tasks.named("check").configure { dependsOn(yamlRestTestTask) } + def addressAndPortSource = fixture.get().addressAndPortSource testClusters.matching { it.name == yamlRestTestTask.name}.configureEach { numberOfNodes = ec2NumberOfNodes @@ -78,9 +87,9 @@ tasks.named("yamlRestTest").configure { enabled = false } setting 'discovery.seed_providers', 'ec2' setting 'network.host', '_ec2_' - setting 'discovery.ec2.endpoint', { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + setting 'discovery.ec2.endpoint', { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE - systemProperty 
"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE } } @@ -107,11 +116,6 @@ tasks.named("ec2FixtureContainerCredentials").configure { env 'ACTIVATE_CONTAINER_CREDENTIALS', true } -testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { - environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', - { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE -} - // Extra config for InstanceProfile tasks.named("ec2FixtureInstanceProfile").configure { env 'ACTIVATE_INSTANCE_PROFILE', true diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index a22678b9a67dc..72cb429b49072 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -7,9 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ - import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index 545dfe49bfcf3..435ad83974efa 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -8,20 +6,14 @@ import org.elasticsearch.gradle.internal.info.BuildParams * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.' 
classname 'org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextPlugin' } -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} - restResources { restApi { include '_common', 'indices', 'index', 'search' diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java b/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java index afb23106bc101..68d141b6df840 100644 --- a/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java +++ b/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class AnnotatedTextClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -25,4 +27,12 @@ public AnnotatedTextClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate public static Iterable<Object[]> parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().plugin("mapper-annotated-text").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index e5108814154a3..0271296df934d 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -8,8 +6,8 @@ import org.elasticsearch.gradle.internal.info.BuildParams * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { description 'The Mapper Murmur3 plugin allows to compute hashes of a field\'s values at index-time and to store them in the index.' 
@@ -20,12 +18,7 @@ esplugin { dependencies { compileOnly project(':modules:lang-painless:spi') testImplementation project(':modules:lang-painless') -} - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + clusterModules project(':modules:lang-painless') } restResources { @@ -33,8 +26,3 @@ esplugin { include '_common', 'indices', 'index', 'search' } } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' -} diff --git a/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java b/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java index d4b0f2e0dc6a7..399b488a5d2f7 100644 --- a/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java +++ b/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class MapperMurmur3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -25,4 +27,12 @@ public MapperMurmur3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate public static Iterable<Object[]> parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("lang-painless").plugin("mapper-murmur3").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index b7f7816a3a0e1..4da7c24de80f1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -20,7 +19,7 @@ esplugin { } versions << [ - 'hadoop': '3.3.3' + 'hadoop': '3.4.1' ] configurations { @@ -42,9 +41,9 @@ dependencies { api "com.google.protobuf:protobuf-java:${versions.protobuf}" api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" - api 'commons-cli:commons-cli:1.2' + api 'commons-cli:commons-cli:1.5.0' api "commons-codec:commons-codec:${versions.commonscodec}" - api 'commons-io:commons-io:2.8.0' + api 'commons-io:commons-io:2.16.1' api 'org.apache.commons:commons-lang3:3.11' api 'javax.servlet:javax.servlet-api:3.1.0' api "org.slf4j:slf4j-api:${versions.slf4j}" @@ -58,14 +57,14 @@ dependencies { javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" - javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.5.0" javaRestTestRuntimeOnly 
"org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" yamlRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) yamlRestTestImplementation project(':test:fixtures:krb5kdc-fixture') yamlRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" yamlRestTestRuntimeOnly "com.google.guava:guava:16.0.1" - yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.5.0" yamlRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2') @@ -178,7 +177,6 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', @@ -189,6 +187,9 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.MessageSchema', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android32MemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor' ) } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java index 98aa9951172ba..ce6acd79a0bb9 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java @@ -47,7 +47,8 @@ class HdfsSecurityContext { // 2) allow hadoop to add credentials to our Subject new AuthPermission("modifyPrivateCredentials"), // 3) RPC Engine requires this for re-establishing pooled connections over the lifetime of the client - new PrivateCredentialPermission("org.apache.hadoop.security.Credentials * \"*\"", "read") }; + new PrivateCredentialPermission("org.apache.hadoop.security.Credentials * \"*\"", "read"), + new RuntimePermission("getClassLoader") }; // If Security is enabled, we need all the following elevated permissions: KERBEROS_AUTH_PERMISSIONS = new Permission[] { diff --git a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle index ce5b840e6dc91..e63b1629db39c 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle +++ b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java 
b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java index 3a983dbd058df..d98d53baf9015 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.junit.Before; import org.junit.ClassRule; import org.junit.rules.RuleChain; @@ -269,10 +268,7 @@ private String getRollupIndexName() throws IOException { } public void testRollupIndex() throws Exception { - assumeTrue( - "Downsample got many stability improvements in 8.10.0", - oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE) - ); + assumeTrue("Downsample got many stability improvements in 8.10.0", oldClusterHasFeature("gte_v8.10.0")); if (isRunningAgainstOldCluster()) { createIlmPolicy(); createIndex(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 26e4f3146da2f..0f41712abe927 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -41,7 +41,6 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -262,7 +261,7 @@ public void testNewReplicas() throws Exception { } public void testSearchTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0")); int numDocs; if (isRunningAgainstOldCluster()) { numDocs = createTimeSeriesModeIndex(1); @@ -300,7 +299,7 @@ public void testSearchTimeSeriesMode() throws Exception { } public void testNewReplicasTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0")); if (isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index f1f4fcf091e8f..9866d94dccc3c 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -17,10 +17,8 @@ import org.elasticsearch.common.network.InetAddresses; 
import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.ClassRule; @@ -31,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { @ClassRule @@ -125,8 +120,6 @@ protected ElasticsearchCluster getUpgradeCluster() { }"""; public void testLogsIndexing() throws IOException { - assumeTrue("Test uses data streams", oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED)); - if (isRunningAgainstOldCluster()) { assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); assertOK(client().performRequest(createDataStream("logs-apache-production"))); @@ -172,22 +165,16 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 1, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.equalTo("logsdb")); } } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 9ca420efe1156..aac2c661dea9f 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -12,8 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -23,7 +21,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; @@ -43,7 +40,6 @@ import org.elasticsearch.test.cluster.FeatureFlag; import 
org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.ClassRule; @@ -249,23 +245,10 @@ public void testQueryBuilderBWC() throws Exception { InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length); StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // condition will always be true - var originalClusterHasTransportVersion = oldClusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED); - final TransportVersion transportVersion; - if (originalClusterHasTransportVersion == false) { - transportVersion = TransportVersion.fromId( - parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) - ); - } else { - transportVersion = TransportVersion.readVersion(input); - } - - input.setTransportVersion(transportVersion); + input.setTransportVersion(TransportVersion.readVersion(input)); QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class); assert in.read() == -1; assertEquals(expectedQueryBuilder, queryBuilder); - } } } diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f6549a2d83fe6..d8f906b23d523 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -11,6 +11,10 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.testclusters.TestClustersPlugin apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -63,6 +67,8 @@ excludeList.add('indices.resolve_index/20_resolve_system_index/*') // Excluded because the error has changed excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') +def clusterPath = getPath() + buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { /* This project runs the core REST tests against a 4 node cluster where two of @@ -84,18 +90,42 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { useCluster baseCluster mustRunAfter("precommit") + Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def baseInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterOneNodeUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterTwoNodesUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + 
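// Three otherwise-identical value sources are declared because a Gradle ValueSource provider computes and caches its value on first read; each one is read at a different stage of the rolling upgrade below. +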
it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + def nonInputProps = nonInputProperties + def sharedRepoFolder = new File(buildDir, "cluster/shared/repo/${baseName}") doFirst { - delete("${buildDir}/cluster/shared/repo/${baseName}") + delete(sharedRepoFolder) // Getting the endpoints causes a wait for the cluster - println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Test cluster endpoints are: ${-> baseInfo.get().join(",")}" println "Upgrading one node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster - println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Upgrade complete, endpoints are: ${-> baseInfoAfterOneNodeUpdate.get().join(",")}" println "Upgrading another node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() - nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) - nonInputProperties.systemProperty('tests.clustername', baseName) + nonInputProps.systemProperty('tests.rest.cluster', baseInfoAfterTwoNodesUpdate.map(c -> c.join(","))) + nonInputProps.systemProperty('tests.clustername', baseName) if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,7 +133,7 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') - onlyIf("BWC tests disabled") { project.bwc_tests_enabled } +// onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } tasks.register(bwcTaskName(bwcVersion)) { diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 906a49134bb51..d46bf3f18f8cc 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -15,7 +15,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index a60e58c34918b..9f9aa78a4910b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -12,18 +12,31 @@ import junit.framework.TestCase; import org.elasticsearch.packaging.util.Distribution; -import org.elasticsearch.packaging.util.FileUtils; +import org.elasticsearch.packaging.util.LintianResultParser; +import org.elasticsearch.packaging.util.LintianResultParser.Issue; +import org.elasticsearch.packaging.util.LintianResultParser.Result; import org.elasticsearch.packaging.util.Shell; import org.junit.BeforeClass; +import java.util.List; import java.util.Locale; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.junit.Assume.assumeTrue; public class DebMetadataTests extends PackagingTestCase { + private final LintianResultParser lintianParser = new 
LintianResultParser();
+    private static final List<String> IGNORED_TAGS = List.of(
+        // Override syntax changes between lintian versions in a non-backwards compatible way, so we have to tolerate these.
+        // Tag mismatched-override is a non-erasable tag which cannot be ignored with overrides, so we handle it here.
+        "mismatched-override",
+        // systemd-service-file-outside-lib is incorrect and has been removed in newer versions of Lintian
+        "systemd-service-file-outside-lib"
+    );
+
     @BeforeClass
     public static void filterDistros() {
         assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB);
@@ -35,15 +48,26 @@ public void test05CheckLintian() {
         if (helpText.contains("--fail-on-warnings")) {
             extraArgs = "--fail-on-warnings";
         } else if (helpText.contains("--fail-on error")) {
-            extraArgs = "--fail-on warning";
-            // Recent lintian versions are picky about malformed or mismatched overrides.
-            // Unfortunately override syntax changes between lintian versions in a non-backwards compatible
-            // way, so we have to tolerate these (or maintain separate override files per lintian version).
-            if (helpText.contains("--suppress-tags")) {
-                extraArgs += " --suppress-tags malformed-override,mismatched-override";
+            extraArgs = "--fail-on error,warning";
+        }
+        Shell.Result result = sh.runIgnoreExitCode(
+            String.format(Locale.ROOT, "lintian %s %s", extraArgs, getDistributionFile(distribution()))
+        );
+        Result lintianResult = lintianParser.parse(result.stdout());
+        // Unfortunately Lintian overrides syntax changes between Lintian versions in a non-backwards compatible
+        // way, so we have to manage some exclusions outside the overrides file.
+        if (lintianResult.isSuccess() == false) {
+            List<Issue> importantIssues = lintianResult.issues()
+                .stream()
+                .filter(issue -> IGNORED_TAGS.contains(issue.tag()) == false)
+                .toList();
+            if (importantIssues.isEmpty() == false) {
+                fail(
+                    "Issues for DEB package found by Lintian:\n"
+                        + importantIssues.stream().map(Record::toString).collect(Collectors.joining("\n"))
+                );
+            }
+        }
-        sh.run(String.format(Locale.ROOT, "lintian %s %s", extraArgs, FileUtils.getDistributionFile(distribution())));
     }

     public void test06Dependencies() {
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
index 8cb8354eb5d71..3ad4c247a8b9b 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
@@ -96,11 +96,10 @@
 /**
  * This class tests the Elasticsearch Docker images. We have several:
  * <ul>
- *     <li>The default image with a custom, small base image</li>
- *     <li>A UBI-based image</li>
+ *     <li>The default image UBI-based image</li>
 *     <li>Another UBI image for Iron Bank</li>
 *     <li>A WOLFI-based image</li>
- *     <li>Images for Cloud</li>
+ *     <li>Image for Cloud</li>
 * </ul>
*/ @ThreadLeakFilters(defaultFilters = true, filters = { HttpClientThreadsFilter.class }) @@ -383,15 +382,14 @@ public void test026InstallBundledRepositoryPluginsViaConfigFile() { public void test040JavaUsesTheOsProvidedKeystore() { final String path = sh.run("realpath jdk/lib/security/cacerts").stdout(); - if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { + if (distribution.packaging == Packaging.DOCKER || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { - // Whereas on other images, it's a real file so the real path is the same - assertThat(path, equalTo("/usr/share/elasticsearch/jdk/lib/security/cacerts")); + fail("Unknown distribution: " + distribution.packaging); } } @@ -1126,25 +1124,25 @@ public void test171AdditionalCliOptionsAreForwarded() throws Exception { } /** - * Check that the UBI images has the correct license information in the correct place. + * Check that the Docker images have the correct license information in the correct place. */ - public void test200UbiImagesHaveLicenseDirectory() { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test200ImagesHaveLicenseDirectory() { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final String[] files = sh.run("find /licenses -type f").stdout().split("\n"); assertThat(files, arrayContaining("/licenses/LICENSE")); // UBI image doesn't contain `diff` - final String ubiLicense = sh.run("cat /licenses/LICENSE").stdout(); + final String imageLicense = sh.run("cat /licenses/LICENSE").stdout(); final String distroLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout(); - assertThat(ubiLicense, equalTo(distroLicense)); + assertThat(imageLicense, equalTo(distroLicense)); } /** - * Check that the UBI image has the expected labels + * Check that the images has the expected labels */ - public void test210UbiLabels() throws Exception { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test210Labels() throws Exception { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final Map labels = getImageLabels(distribution); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 02e1ce35764cf..a47dd0e57642e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); default -> throw new IllegalStateException("Unknown 
Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index b4a00ca56924a..a157cc84e624e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -333,7 +333,6 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case RPM: return Packages.runElasticsearchStartCommand(sh); case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -355,7 +354,6 @@ public void stopElasticsearch() throws Exception { Packages.stopElasticsearch(sh); break; case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -371,7 +369,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 11b8324384631..55c59db6219d3 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -29,8 +29,6 @@ public Distribution(Path path) { this.packaging = Packaging.TAR; } else if (filename.endsWith(".docker.tar")) { this.packaging = Packaging.DOCKER; - } else if (filename.endsWith(".ubi.tar")) { - this.packaging = Packaging.DOCKER_UBI; } else if (filename.endsWith(".ironbank.tar")) { this.packaging = Packaging.DOCKER_IRON_BANK; } else if (filename.endsWith(".cloud-ess.tar")) { @@ -61,7 +59,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -73,7 +71,6 @@ public enum Packaging { DEB(".deb", Platforms.isDPKG()), RPM(".rpm", Platforms.isRPM()), DOCKER(".docker.tar", Platforms.isDocker()), - DOCKER_UBI(".ubi.tar", Platforms.isDocker()), DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java new 
file mode 100644
index 0000000000000..511080427ea77
--- /dev/null
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.packaging.util;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LintianResultParser {
+
+    private static final Logger logger = LogManager.getLogger(LintianResultParser.class);
+    private static final Pattern RESULT_PATTERN = Pattern.compile("(?<severity>[EW]): (?<package>\\S+): (?<tag>\\S+) (?<message>.+)");
+
+    public Result parse(String output) {
+        String[] lines = output.split("\n");
+        List<Issue> issues = Arrays.stream(lines).map(line -> {
+            Matcher matcher = RESULT_PATTERN.matcher(line);
+            if (matcher.matches() == false) {
+                logger.info("Lintian output not matching expected pattern: {}", line);
+                return null;
+            }
+            Severity severity = switch (matcher.group("severity")) {
+                case "E" -> Severity.ERROR;
+                case "W" -> Severity.WARNING;
+                default -> Severity.UNKNOWN;
+            };
+            return new Issue(severity, matcher.group("tag"), matcher.group("message"));
+        }).filter(Objects::nonNull).toList();
+
+        return new Result(issues.stream().noneMatch(it -> it.severity == Severity.ERROR || it.severity == Severity.WARNING), issues);
+    }
+
+    public record Result(boolean isSuccess, List<Issue> issues) {}
+
+    public record Issue(Severity severity, String tag, String message) {}
+
+    enum Severity {
+        ERROR,
+        WARNING,
+        UNKNOWN
+    }
+}
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java
index e3eac23d3ecce..5dc47993072a8 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java
@@ -163,7 +163,6 @@ String build() {
     public static String getImageName(Distribution distribution) {
         String suffix = switch (distribution.packaging) {
             case DOCKER -> "";
-            case DOCKER_UBI -> "-ubi";
             case DOCKER_IRON_BANK -> "-ironbank";
             case DOCKER_CLOUD_ESS -> "-cloud-ess";
             case DOCKER_WOLFI -> "-wolfi";
diff --git a/qa/repository-multi-version/build.gradle b/qa/repository-multi-version/build.gradle
index 79a8be4c1be24..646a7974868c4 100644
--- a/qa/repository-multi-version/build.gradle
+++ b/qa/repository-multi-version/build.gradle
@@ -7,7 +7,6 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-testclusters'
diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle
index 2f717f201f248..1d7475427b33b 100644
--- a/qa/rolling-upgrade/build.gradle
+++ b/qa/rolling-upgrade/build.gradle
@@ -7,7 +7,6 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
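The behaviour of the new parser above is easiest to see on a concrete transcript. A minimal sketch, assuming the LintianResultParser file added in this diff; the lintian lines, tags, and messages are made up for illustration, and the example class sits in the same package so the nested types resolve directly:

package org.elasticsearch.packaging.util;

import java.util.List;

public class LintianResultParserExample {
    public static void main(String[] args) {
        // Hypothetical lintian transcript: one error, one warning we tolerate, and one
        // informational line that does not match the E/W pattern and is logged and dropped.
        String output = """
            E: elasticsearch: some-policy-error details about the error
            W: elasticsearch: mismatched-override this override no longer matches
            N: elasticsearch: informational output is skipped by the parser""";

        LintianResultParser.Result result = new LintianResultParser().parse(output);
        System.out.println(result.isSuccess()); // false: an E or W issue is present

        // Mirror DebMetadataTests: drop deliberately tolerated tags before deciding to fail.
        List<LintianResultParser.Issue> important = result.issues()
            .stream()
            .filter(issue -> issue.tag().equals("mismatched-override") == false)
            .toList();
        important.forEach(issue -> System.out.println(issue.tag() + ": " + issue.message()));
    }
}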
*/ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index e0d1e7aafa637..d9adec47ff483 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -14,14 +14,12 @@ import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.Request; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodeWithStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; @@ -43,24 +41,7 @@ public DesiredNodesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { desiredNodesVersion = upgradedNodes + 1; } - private enum ProcessorsPrecision { - DOUBLE, - FLOAT - } - public void testUpgradeDesiredNodes() throws Exception { - assumeTrue("Desired nodes was introduced in 8.1", oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_NODE_API_SUPPORTED)); - - if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) { - assertUpgradedNodesCanReadDesiredNodes(); - } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) { - assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent(); - } else { - assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions(); - } - } - - private void assertUpgradedNodesCanReadDesiredNodes() throws Exception { if (isMixedCluster() || isUpgradedCluster()) { final Map desiredNodes = getLatestDesiredNodes(); final String historyId = extractValue(desiredNodes, "history_id"); @@ -69,60 +50,10 @@ private void assertUpgradedNodesCanReadDesiredNodes() throws Exception { assertThat(version, is(equalTo(desiredNodesVersion - 1))); } - addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.DOUBLE); + addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion); assertAllDesiredNodesAreActualized(); } - private void assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent() throws Exception { - // We define the same set of desired nodes to ensure that they are equal across all - // the test runs, otherwise we cannot guarantee an idempotent update in this test - final var desiredNodes = getNodeNames().stream() - .map( - nodeName -> new DesiredNode( - Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - 1238.49922909, - ByteSizeValue.ofGb(32), - ByteSizeValue.ofGb(128), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() - ) - ) - .toList(); - - if (isMixedCluster()) { - updateDesiredNodes(desiredNodes, desiredNodesVersion - 1); - } - for (int i = 0; i < 2; i++) { - updateDesiredNodes(desiredNodes, desiredNodesVersion); - } - - final Map latestDesiredNodes = getLatestDesiredNodes(); - final int latestDesiredNodesVersion = extractValue(latestDesiredNodes, "version"); - assertThat(latestDesiredNodesVersion, is(equalTo(desiredNodesVersion))); - - if (isUpgradedCluster()) { - assertAllDesiredNodesAreActualized(); - } - } - - private void assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions() throws Exception { - if (isOldCluster()) { - addClusterNodesToDesiredNodesWithIntegerProcessors(1); - } else if (isMixedCluster()) { - // Processor ranges or float processors are forbidden during upgrades: 8.2 -> 8.3 clusters - final var responseException = expectThrows( - ResponseException.class, - () -> addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.FLOAT) - ); - final var statusCode = responseException.getResponse().getStatusLine().getStatusCode(); - assertThat(statusCode, is(equalTo(400))); - } else { - assertAllDesiredNodesAreActualized(); - addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(4, ProcessorsPrecision.FLOAT); - } - - getLatestDesiredNodes(); - } - private Map getLatestDesiredNodes() throws IOException { final var getDesiredNodesRequest = new Request("GET", "/_internal/desired_nodes/_latest"); final var response = client().performRequest(getDesiredNodesRequest); @@ -143,15 +74,14 @@ private void assertAllDesiredNodesAreActualized() throws Exception { } } - private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version, ProcessorsPrecision processorsPrecision) - throws Exception { + private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version) throws Exception { final List nodes; if (randomBoolean()) { nodes = getNodeNames().stream() .map( nodeName -> new DesiredNode( Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - processorsPrecision == ProcessorsPrecision.DOUBLE ? randomDoubleProcessorCount() : 0.5f, + randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() @@ -160,9 +90,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve .toList(); } else { nodes = getNodeNames().stream().map(nodeName -> { - double minProcessors = processorsPrecision == ProcessorsPrecision.DOUBLE - ? 
randomDoubleProcessorCount() - : randomFloatProcessorCount(); + double minProcessors = randomDoubleProcessorCount(); return new DesiredNode( Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), @@ -175,21 +103,6 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve updateDesiredNodes(nodes, version); } - private void addClusterNodesToDesiredNodesWithIntegerProcessors(int version) throws Exception { - final var nodes = getNodeNames().stream() - .map( - nodeName -> new DesiredNode( - Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - randomIntBetween(1, 24), - ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() - ) - ) - .toList(); - updateDesiredNodes(nodes, version); - } - private void updateDesiredNodes(List nodes, int version) throws IOException { final var request = new Request("PUT", "/_internal/desired_nodes/upgrade_test/" + version); try (var builder = JsonXContent.contentBuilder()) { @@ -226,10 +139,6 @@ private double randomDoubleProcessorCount() { return randomDoubleBetween(0.5, 512.1234, true); } - private float randomFloatProcessorCount() { - return randomIntBetween(1, 512) + randomFloat(); - } - @SuppressWarnings("unchecked") private static T extractValue(Map map, String path) { return (T) XContentMapValues.extractValue(path, map); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 70658da70eb80..bca0c26ad2c32 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.junit.Before; import java.io.IOException; @@ -244,10 +243,6 @@ private String getRollupIndexName() throws IOException { } public void testRollupIndex() throws Exception { - assumeTrue( - "Downsample got many stability improvements in 8.10.0", - oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE) - ); if (isOldCluster()) { createIlmPolicy(); createIndex(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java deleted file mode 100644 index 2ed1b7fe9e79b..0000000000000 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
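Across DesiredNodesUpgradeIT, DownsampleIT, and the other rolling-upgrade tests touched here, removing the legacy-feature assumptions leaves only the upgrade-phase gating. A minimal sketch of that recurring shape, using the phase helpers visible throughout this diff; the body comments are illustrative, not taken from any one test:

// Sketch only: isOldCluster()/isMixedCluster()/isUpgradedCluster() come from the
// rolling-upgrade test base class used in these files; the branch bodies are hypothetical.
public void testBehaviorAcrossUpgrade() throws Exception {
    if (isOldCluster()) {
        // seed data or cluster state on the original version
    } else if (isMixedCluster()) {
        // exercise the feature while old and new nodes coexist
    } else if (isUpgradedCluster()) {
        // verify the fully upgraded cluster still reads what the old cluster wrote
    }
}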
- */ - -package org.elasticsearch.upgrades; - -import com.carrotsearch.randomizedtesting.annotations.Name; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.hamcrest.Matchers; - -import java.nio.charset.StandardCharsets; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { - - public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { - super(upgradedNodes); - } - - public void testHealthNode() throws Exception { - if (clusterHasFeature("health.supports_health")) { - assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_cat/tasks")); - String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); - assertThat(tasks, Matchers.containsString("health-node")); - }); - assertBusy(() -> { - String path = clusterHasFeature("health.supports_health_report_api") ? "_health_report" : "_internal/_health"; - Response response = client().performRequest(new Request("GET", path)); - Map health_report = entityAsMap(response.getEntity()); - assertThat(health_report.get("status"), equalTo("green")); - }); - } - } -} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 65bf62783fd69..090f409fd46d0 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -18,10 +18,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.test.ListMatcher; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -258,7 +258,6 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio public void testTsdb() throws IOException { final Version oldClusterVersion = Version.fromString(getOldClusterVersion()); - assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); StringBuilder bulk = new StringBuilder(); if (isOldCluster()) { @@ -385,6 +384,7 @@ private void tsdbBulk(StringBuilder bulk, String dim, long timeStart, long timeE private void assertTsdbAgg(final Version oldClusterVersion, final List expectedTsids, final Matcher... 
expected) throws IOException { + @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_ANALYTICS) boolean onOrAfterTsidHashingVersion = oldClusterVersion.onOrAfter(Version.V_8_13_0); Request request = new Request("POST", "/tsdb/_search"); request.addParameter("size", "0"); @@ -414,8 +414,6 @@ private void assertTsdbAgg(final Version oldClusterVersion, final List e } public void testSyntheticSource() throws IOException { - assumeTrue("added in 8.4.0", oldClusterHasFeature(RestTestLegacyFeatures.SYNTHETIC_SOURCE_SUPPORTED)); - if (isOldCluster()) { Request createIndex = new Request("PUT", "/synthetic"); XContentBuilder indexSpec = XContentBuilder.builder(XContentType.JSON.xContent()).startObject(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java index 8c369ebc9950d..1eb7cbd3f70c2 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @ClassRule() @@ -160,14 +156,10 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 3, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.nullValue()); + assertIndexSettings(2, Matchers.nullValue()); + assertIndexSettings(3, Matchers.equalTo("logsdb")); } } @@ -183,13 +175,11 @@ static void enableLogsdbByDefault() throws IOException { assertOK(client().performRequest(request)); } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index 3343a683bbd11..9217852f1867c 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -50,12 +49,6 @@ public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { } public void testSnapshotBasedRecovery() throws Exception { - assumeTrue( - "Cancel shard allocation command is broken for initial versions of the desired_balance allocator", - oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_SUPPORTED) == false - || oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_FIXED) - ); - final String indexName = "snapshot_based_recovery"; final String repositoryName = "snapshot_based_recovery_repo"; final int numDocs = 200; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 6744c84f29d0f..46b39128c3a31 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.time.Instant; @@ -24,8 +23,6 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class TsdbIT extends AbstractRollingUpgradeTestCase { @@ -131,7 +128,6 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { """; public void testTsdbDataStream() throws Exception { - assumeTrue("TSDB was GA-ed in 8.7.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE)); String dataStreamName = "k8s"; if (isOldCluster()) { final String INDEX_TEMPLATE = """ @@ -155,70 +151,6 @@ public void testTsdbDataStream() throws Exception { } } - public void testTsdbDataStreamWithComponentTemplate() throws Exception { - assumeTrue( - "TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", - oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE) - && (oldClusterHasFeature(RestTestLegacyFeatures.TSDB_EMPTY_TEMPLATE_FIXED) == false) - ); - String dataStreamName = "template-with-component-template"; - if (isOldCluster()) { - final String COMPONENT_TEMPLATE = """ - { - "template": $TEMPLATE - } - """; - var putComponentTemplate = new Request("POST", "/_component_template/1"); - String template = TEMPLATE.replace("\"time_series\"", "\"time_series\", \"routing_path\": [\"k8s.pod.uid\"]"); - putComponentTemplate.setJsonEntity(COMPONENT_TEMPLATE.replace("$TEMPLATE", template)); - assertOK(client().performRequest(putComponentTemplate)); - final String 
INDEX_TEMPLATE = """ - { - "index_patterns": ["$PATTERN"], - "composed_of": ["1"], - "data_stream": { - } - }"""; - // Add composable index template - String templateName = "2"; - var putIndexTemplateRequest = new Request("POST", "/_index_template/" + templateName); - putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$PATTERN", dataStreamName)); - assertOK(client().performRequest(putIndexTemplateRequest)); - - performOldClustertOperations(templateName, dataStreamName); - } else if (isMixedCluster()) { - performMixedClusterOperations(dataStreamName); - } else if (isUpgradedCluster()) { - performUpgradedClusterOperations(dataStreamName); - - var dataStreams = getDataStream(dataStreamName); - assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(2)); - String firstBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.0.index_name"); - { - var indices = getIndex(firstBackingIndex); - var escapedBackingIndex = firstBackingIndex.replace(".", "\\."); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), nullValue()); - String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"); - assertThat(startTime, nullValue()); - String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"); - assertThat(endTime, nullValue()); - } - String secondBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.1.index_name"); - { - var indices = getIndex(secondBackingIndex); - var escapedBackingIndex = secondBackingIndex.replace(".", "\\."); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), equalTo("time_series")); - String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"); - assertThat(startTime, notNullValue()); - String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"); - assertThat(endTime, notNullValue()); - } - } - } - private void performUpgradedClusterOperations(String dataStreamName) throws Exception { ensureGreen(dataStreamName); var rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover"); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index f3a322b54039a..b2298c12b7b98 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -17,13 +17,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.util.Map; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; -import static org.hamcrest.Matchers.is; public class UpgradeWithOldIndexSettingsIT extends 
AbstractRollingUpgradeTestCase { @@ -35,33 +33,22 @@ public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { private static final String EXPECTED_WARNING = "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will " + "be removed in a future release! See the breaking changes documentation for the next major version."; - private static final String EXPECTED_V8_WARNING = "[index.indexing.slowlog.level] setting was deprecated in the previous Elasticsearch" - + " release and is removed in this release."; - public void testOldIndexSettings() throws Exception { if (isOldCluster()) { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME); createTestIndex.setJsonEntity("{\"settings\": {\"index.indexing.slowlog.level\": \"WARN\"}}"); createTestIndex.setOptions(expectWarnings(EXPECTED_WARNING)); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED)) { - assertTrue( - expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() - .contains("unknown setting [index.indexing.slowlog.level]") - ); + assertTrue( + expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() + .contains("unknown setting [index.indexing.slowlog.level]") + ); - Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); - client().performRequest(createTestIndex1); - } else { - // create index with settings no longer valid in 8.0 - client().performRequest(createTestIndex); - } + Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); + client().performRequest(createTestIndex1); // add some data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { - bulk.setOptions(expectWarnings(EXPECTED_WARNING)); - } bulk.setJsonEntity(Strings.format(""" {"index": {"_index": "%s"}} {"f1": "v1", "f2": "v2"} @@ -71,34 +58,12 @@ public void testOldIndexSettings() throws Exception { // add some more data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { - bulk.setOptions(expectWarnings(EXPECTED_WARNING)); - } bulk.setJsonEntity(Strings.format(""" {"index": {"_index": "%s"}} {"f1": "v3", "f2": "v4"} """, INDEX_NAME)); client().performRequest(bulk); } else { - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { - Request createTestIndex = new Request("PUT", "/" + INDEX_NAME + "/_settings"); - // update index settings should work - createTestIndex.setJsonEntity("{\"index.indexing.slowlog.level\": \"INFO\"}"); - createTestIndex.setOptions(expectWarnings(EXPECTED_V8_WARNING)); - client().performRequest(createTestIndex); - - // ensure we were able to change the setting, despite it having no effect - Request indexSettingsRequest = new Request("GET", "/" + INDEX_NAME + "/_settings"); - Map response = entityAsMap(client().performRequest(indexSettingsRequest)); - - var slowLogLevel = (String) (XContentMapValues.extractValue( - INDEX_NAME + ".settings.index.indexing.slowlog.level", - response - )); - - // check that we can read our old index settings - assertThat(slowLogLevel, is("INFO")); - } assertCount(INDEX_NAME, 2); } } @@ -118,16 +83,6 @@ private void assertCount(String index, int countAtLeast) throws IOException { public static void 
updateIndexSettingsPermittingSlowlogDeprecationWarning(String index, Settings.Builder settings) throws IOException { Request request = new Request("PUT", "/" + index + "/_settings"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(settings.build())); - if (oldClusterHasFeature(RestTestLegacyFeatures.DEPRECATION_WARNINGS_LEAK_FIXED) == false) { - // There is a bug (fixed in 7.17.9 and 8.7.0 where deprecation warnings could leak into ClusterApplierService#applyChanges) - // Below warnings are set (and leaking) from an index in this test case - request.setOptions(expectVersionSpecificWarnings(v -> { - v.compatible( - "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will be removed in a future release! " - + "See the breaking changes documentation for the next major version." - ); - })); - } client().performRequest(request); } } diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index ee29da53dc51b..67fc962e087cb 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 8e1df37804708..650d17e41de7f 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -59,4 +59,11 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") + task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") + task.skipTest("tsdb/20_mapping/stored source is supported", "no longer serialize source_mode") + task.skipTest("tsdb/20_mapping/Synthetic source", "no longer serialize source_mode") + task.skipTest("logsdb/10_settings/create logs index", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json index bce8dfd794dca..6f3d09c15c081 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json @@ -55,6 +55,10 @@ "type": "string", "description": "Specific the time to live for the point in time", "required": true + }, + "allow_partial_search_results": { + "type": "boolean", + "description": "Specify whether to tolerate shards missing when creating the point-in-time, or otherwise throw an exception. 
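For the new open point-in-time parameter, a minimal request-level sketch, assuming the low-level REST client and the client() helper used throughout these tests; the index name and keep-alive value are arbitrary examples:

// Open a point-in-time, tolerating unavailable shards instead of failing the request.
Request openPit = new Request("POST", "/my-index/_pit");
openPit.addParameter("keep_alive", "1m");
openPit.addParameter("allow_partial_search_results", "true"); // new parameter; defaults to false
Response response = client().performRequest(openPit);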
(default: false)" } }, "body":{ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml deleted file mode 100644 index 6ecd9c3e9c2ce..0000000000000 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml +++ /dev/null @@ -1,14 +0,0 @@ ---- -"Ensure index creation date and tier preference are exposed": - - requires: - cluster_features: ["stats.tier_creation_date"] - reason: index creation date and tier preference added to stats in 8.17 - - - do: - indices.create: - index: myindex - - do: - indices.stats: {} - - - is_true: indices.myindex.creation_date - - is_true: indices.myindex.tier_preference diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index d0f89b1b8b6cb..463df7d2ab1bb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -76,11 +76,6 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logsdb" } - - do: - indices.get_mapping: - index: test - - match: { test.mappings._source.mode: synthetic } - --- using default timestamp field mapping: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 27146557bb1be..06a007b8aaca5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -13,10 +13,10 @@ synthetic _source is default: index: mode: logsdb - do: - indices.get: + indices.get_settings: index: test-default-source - - - match: { test-default-source.mappings._source.mode: "synthetic" } + - match: { test-default-source.settings.index.mode: logsdb } + - match: { test-default-source.settings.index.mapping.source.mode: null } --- stored _source mode is supported: @@ -28,11 +28,12 @@ stored _source mode is supported: index: mode: logsdb mapping.source.mode: stored + - do: - indices.get: + indices.get_settings: index: test-stored-source - - - match: { test-stored-source.mappings._source.mode: "stored" } + - match: { test-stored-source.settings.index.mode: logsdb } + - match: { test-stored-source.settings.index.mapping.source.mode: stored } --- disabled _source is not supported: @@ -110,7 +111,6 @@ include/exclude is supported with stored _source: indices.get: index: test-includes - - match: { test-includes.mappings._source.mode: "stored" } - match: { test-includes.mappings._source.includes: ["a"] } - do: @@ -129,5 +129,4 @@ include/exclude is supported with stored _source: indices.get: index: test-excludes - - match: { test-excludes.mappings._source.mode: "stored" } - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml index e9bfffb8da604..76057b5a364fb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml @@ -123,29 +123,3 @@ setup: - match: { hits.total: 1 } - length: { hits.hits: 1 } - match: { hits.hits.0._id: "4" } - - ---- -"from, to, include_lower, include_upper deprecated": - - requires: - cluster_features: "gte_v8.16.0" - reason: 'from, to, include_lower, include_upper parameters are deprecated since 8.16.0' - test_runner_features: warnings - - - do: - warnings: - - "Deprecated field [from] used, this field is unused and will be removed entirely" - - "Deprecated field [to] used, this field is unused and will be removed entirely" - - "Deprecated field [include_lower] used, this field is unused and will be removed entirely" - - "Deprecated field [include_upper] used, this field is unused and will be removed entirely" - search: - index: dates - body: - sort: field - query: - range: - date: - from: 1000 - to: 2023 - include_lower: false - include_upper: false diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 4d8f03a6e5e18..9fe3f5e0b7272 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -450,11 +450,6 @@ nested fields: type: long time_series_metric: gauge - - do: - indices.get_mapping: {} - - - match: {tsdb-synthetic.mappings._source.mode: synthetic} - --- stored source is supported: - requires: @@ -486,12 +481,6 @@ stored source is supported: type: keyword time_series_dimension: true - - do: - indices.get: - index: tsdb_index - - - match: { tsdb_index.mappings._source.mode: "stored" } - --- disabled source is not supported: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index db718959919da..54b2bf59c8ddc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -129,7 +129,7 @@ noop update: {} --- -update: +regular update: - requires: cluster_features: ["gte_v8.2.0"] reason: tsdb indexing changed in 8.2.0 diff --git a/server/build.gradle b/server/build.gradle index bc8decfa8babc..0bd807751ecbb 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java deleted file mode 100644 index 8b5e014b519c8..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action; - -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.Requests; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; - -public class ListenerActionIT extends ESIntegTestCase { - public void testThreadedListeners() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - final AtomicReference failure = new AtomicReference<>(); - final AtomicReference threadName = new AtomicReference<>(); - Client client = client(); - - IndexRequest request = new IndexRequest("test").id("1"); - if (randomBoolean()) { - // set the source, without it, we will have a verification failure - request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); - } - - client.index(request, new ActionListener() { - @Override - public void onResponse(DocWriteResponse indexResponse) { - threadName.set(Thread.currentThread().getName()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - threadName.set(Thread.currentThread().getName()); - failure.set(e); - latch.countDown(); - } - }); - - latch.await(); - - assertFalse(threadName.get().contains("listener")); - } -} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index 4977d87d5a348..deae022795ad2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -65,7 +65,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .put(super.nodeSettings(nodeOrdinal, otherSettings)) .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") - .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") .build(); } @@ -161,6 +161,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { IndexRequest indexRequest = indexRequest(index); long total = indexRequest.ramBytesUsed(); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); while (total < 2048) { refCounted.incRef(); handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); @@ -175,6 +177,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), 
equalTo(highWaterMarkSplits)); PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); handler.lastItems(List.of(indexRequest), refCounted::decRef, future); @@ -192,6 +196,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); AtomicBoolean nextPage = new AtomicBoolean(false); @@ -217,6 +223,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); assertTrue(nextPage.get()); nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); ArrayList<DocWriteRequest<?>> requestsThrottle = new ArrayList<>(); // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled @@ -235,6 +243,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { // Wait until we are ready for the next page assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); for (IncrementalBulkService.Handler h : handlers) { refCounted.incRef(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index a1395f81eb091..67576059de1e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -669,7 +670,7 @@ public Aggregator subAggregator(String aggregatorName) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[] { buildEmptyAggregation() }; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java index 9a71bf86388a4..b3ec4a5331180 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteUtils; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterInfoServiceUtils; import org.elasticsearch.cluster.InternalClusterInfoService; @@ -68,6 +69,7 @@ public void testDesiredBalanceMetrics() { final var infoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(ClusterInfoService.class); ClusterInfoServiceUtils.setUpdateFrequency(infoService, TimeValue.timeValueMillis(200)); assertNotNull("info should not be null", ClusterInfoServiceUtils.refresh(infoService)); + ClusterRerouteUtils.reroute(client()); // ensure we leverage the latest cluster info final var telemetryPlugin = getTelemetryPlugin(internalCluster().getMasterName()); telemetryPlugin.collect(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 0647a24aa39c8..de9e3f28a2109 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -50,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -843,24 +844,13 @@ public void testMultipleTemplate() throws IOException { ensureGreen(); - // ax -> matches template - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1); + assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); + assertNull(response.getHits().getAt(0).field("field2")); + }, prepareSearch("ax").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } - ); - - // bx -> matches template - assertResponse( - prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } + prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2") ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 9364e7437141e..e4d44212f2854 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -9,35 +9,48 @@ package org.elasticsearch.monitor.metrics; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.IncrementalBulkService; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static org.elasticsearch.index.IndexingPressure.MAX_COORDINATING_BYTES; import static org.elasticsearch.index.IndexingPressure.MAX_PRIMARY_BYTES; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { @@ -453,6 +466,211 @@ public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exce } } + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkLowWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + 
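// Editorial aside, not part of the patch: a sketch of the mechanism these two
// borrowed tests pin down, inferred from the settings above and the assertions
// that follow. With SPLIT_BULK_LOW_WATERMARK=512B and SPLIT_BULK_LOW_WATERMARK_SIZE=2048B,
// an incremental bulk is split once node-wide indexing bytes sit above 512B and
// the in-flight request has accumulated more than 2048B; each such split should
// increment IndexingPressureStats.getLowWaterMarkSplits() and the async APM
// counter "es.indexing.coordinating.low_watermark_splits.total". The 4KB/1024B
// pair plays the same role for the high-watermark counter when the write pool
// is backed up.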
IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + IndexRequest indexRequest = indexRequest(index); + long total = indexRequest.ramBytesUsed(); + while (total < 2048) { + refCounted.incRef(); + handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + indexRequest = indexRequest(index); + total += indexRequest.ramBytesUsed(); + } + + assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), greaterThan(0L)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + refCounted.incRef(); + handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); + handler.lastItems(List.of(indexRequest), refCounted::decRef, future); + + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + assertFalse(refCounted.hasReferences()); + } + + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkHighWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + ArrayList<IncrementalBulkService.Handler> handlers = new 
ArrayList<>(); + for (int i = 0; i < 4; ++i) { + ArrayList<DocWriteRequest<?>> requests = new ArrayList<>(); + add512BRequests(requests, index); + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + handlers.add(handler); + refCounted.incRef(); + handler.addItems(requests, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + } + + // Test that a request smaller than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is not throttled + ArrayList<DocWriteRequest<?>> requestsNoThrottle = new ArrayList<>(); + add512BRequests(requestsNoThrottle, index); + IncrementalBulkService.Handler handlerNoThrottle = incrementalBulkService.newBulkRequest(); + handlers.add(handlerNoThrottle); + refCounted.incRef(); + handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + ArrayList<DocWriteRequest<?>> requestsThrottle = new ArrayList<>(); + // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled + add512BRequests(requestsThrottle, index); + add512BRequests(requestsThrottle, index); + + CountDownLatch finishLatch = new CountDownLatch(1); + blockWritePool(threadPool, finishLatch); + IncrementalBulkService.Handler handlerThrottled = incrementalBulkService.newBulkRequest(); + refCounted.incRef(); + handlerThrottled.addItems(requestsThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertFalse(nextPage.get()); + finishLatch.countDown(); + + handlers.add(handlerThrottled); + + // Wait until we are ready for the next page + assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + + for (IncrementalBulkService.Handler h : handlers) { + refCounted.incRef(); + PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); + h.lastItems(List.of(indexRequest(index)), refCounted::decRef, future); + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + } + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + refCounted.decRef(); + assertFalse(refCounted.hasReferences()); + testTelemetryPlugin.collect(); + } + private static Measurement getSingleRecordedMetric(Function<String, List<Measurement>> metricGetter, String name) { final List<Measurement> measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); @@ -470,4 +688,47 @@ private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return 
Math.abs(expected - actual) < eps; } + + private static IndexRequest indexRequest(String index) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(index); + indexRequest.source(Map.of("field", randomAlphaOfLength(10))); + return indexRequest; + } + + private static void add512BRequests(ArrayList<DocWriteRequest<?>> requests, String index) { + long total = 0; + while (total < 512) { + IndexRequest indexRequest = indexRequest(index); + requests.add(indexRequest); + total += indexRequest.ramBytesUsed(); + } + assertThat(total, lessThan(1024L)); + } + + private static void blockWritePool(ThreadPool threadPool, CountDownLatch finishLatch) { + final var threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); + final var startBarrier = new CyclicBarrier(threadCount + 1); + final var blockingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + safeAwait(startBarrier); + safeAwait(finishLatch); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + for (int i = 0; i < threadCount; i++) { + threadPool.executor(ThreadPool.Names.WRITE).execute(blockingTask); + } + safeAwait(startBarrier); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index 544f0a08eaa6c..0aa28b9f9dbe8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.sampler.random.InternalRandomSampler; import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder; @@ -20,11 +21,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.lessThan; @@ -112,27 +115,28 @@ public void testRandomSamplerConsistentSeed() { } ); - for (int i = 0; i < NUM_SAMPLE_RUNS; i++) { - assertResponse( - prepareSearch("idx").setPreference("shard:0") - .addAggregation( - new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) - .setSeed(0) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - .setShardSeed(42) - ), - response -> { - InternalRandomSampler sampler = response.getAggregations().get("sampler"); - double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); - double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); - long 
docCount = sampler.getDocCount(); - assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); - assertEquals(numericValue, sampleNumericValue[0], tolerance); - assertEquals(docCount, sampledDocCount[0]); - } - ); - } + assertResponses(response -> { + InternalRandomSampler sampler = response.getAggregations().get("sampler"); + double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); + double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); + long docCount = sampler.getDocCount(); + assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); + assertEquals(numericValue, sampleNumericValue[0], tolerance); + assertEquals(docCount, sampledDocCount[0]); + }, + IntStream.rangeClosed(0, NUM_SAMPLE_RUNS - 1) + .mapToObj( + num -> prepareSearch("idx").setPreference("shard:0") + .addAggregation( + new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) + .setSeed(0) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + .setShardSeed(42) + ) + ) + .toArray(SearchRequestBuilder[]::new) + ); } public void testRandomSampler() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index d1841ebaf8071..87665c3d784f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -54,6 +54,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; import java.util.function.Function; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; @@ -66,6 +67,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -323,6 +325,12 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut } public void test3078() { + Consumer<SearchResponse> assertConsumer = response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + }; + assertAcked(indicesAdmin().prepareCreate("test").setMapping("field", "type=keyword").get()); ensureGreen(); @@ -332,11 +340,7 @@ public void test3078() { refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex and refresh 
prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); @@ -344,22 +348,14 @@ public void test3078() { assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex - no refresh prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // force merge forceMerge(); @@ -368,20 +364,12 @@ public void test3078() { prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); } @@ -395,39 +383,19 @@ public void testScoreSortDirection() throws Exception { refresh(); - assertResponse( + assertResponses(response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + }, prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) ), - response -> { - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( - prepareSearch("test").setQuery( - QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - } + ).addSort("_score", SortOrder.DESC) ); } @@ -878,30 +846,20 @@ public void testSortMissingStrings() throws IOException { throw new RuntimeException(); } - logger.info("--> sort with no missing (same as missing _last)"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + assertResponses(response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + }, + // "--> sort with no missing (same as missing _last)" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), + // "--> sort with missing _last" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")) ); - logger.info("--> sort with missing _last"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } - ); logger.info("--> sort with missing _first"); assertResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")), @@ -1263,59 +1221,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L)); } ); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), 
equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1327,8 +1233,12 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), 
response -> { + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC) + ); + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1340,7 +1250,11 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC) + ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1478,8 +1392,7 @@ public void testSortOnRareField() throws IOException { } refresh(); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - + Consumer<SearchResponse> assertResponse = response -> { assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1490,27 +1403,17 @@ public void testSortOnRareField() throws IOException { assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10")); assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + }; + + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); + for (int i = 0; i < 15; i++) { prepareIndex("test").setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); refresh(); } - - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20")); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10")); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); } public void testSortMetaField() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 1383f33a41d84..aabca1b9333f8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java 
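Editorial note: the hunks above and below repeatedly collapse several assertResponse calls into a single assertResponses call that applies one assertion block to many search requests. A minimal sketch of the helper's likely shape, inferred from these call sites rather than copied from ElasticsearchAssertions:

// Sketch only: run the same assertions against every request, delegating to the
// single-request assertResponse helper these tests used before the refactor.
// The varargs signature is inferred from the call sites in this diff.
public static void assertResponses(Consumer<SearchResponse> consumer, SearchRequestBuilder... requests) {
    for (SearchRequestBuilder request : requests) {
        assertResponse(request, consumer);
    }
}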
@@ -34,6 +34,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; @@ -292,49 +293,22 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup String hashPoint = "s037ms06g7h0"; - GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( + assertResponses( + response -> checkCorrectSortOrderForGeoSort(response), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2).sortMode(SortMode.MIN).order(SortOrder.ASC)), prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0"))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource( new SearchSourceBuilder().sort( SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE) ) - ), - response -> checkCorrectSortOrderForGeoSort(response) + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index ec9c680e17fc3..9d53eb03eb04e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -22,12 +22,14 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class MetadataFetchingIT extends ESIntegTestCase { + public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); @@ -35,17 +37,14 @@ public void testSimple() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); - }); - - assertResponse(prepareSearch("test").storedFields("_none_"), response -> { - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - }); + }, + prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), + prepareSearch("test").storedFields("_none_") + ); } public void testInnerHits() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 616fc2e1f3483..0e7f8b604a8df 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -12,11 +12,13 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsEqual.equalTo; public class SourceFetchingIT extends ESIntegTestCase { + public void testSourceDefaultBehavior() { createIndex("test"); ensureGreen(); @@ -24,18 +26,16 @@ public void testSourceDefaultBehavior() { indexDoc("test", "1", "field", "value"); refresh(); - assertResponse(prepareSearch("test"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue())); + assertResponses( + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()), + prepareSearch("test"), + prepareSearch("test").addStoredField("_source") + ); assertResponse( prepareSearch("test").addStoredField("bla"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()) ); - - assertResponse( - prepareSearch("test").addStoredField("_source"), - response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) - ); - } public void testSourceFiltering() { @@ -55,20 +55,20 @@ public void testSourceFiltering() { response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) ); - assertResponse(prepareSearch("test").setFetchSource("field1", null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); 
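// (Editorial note: the two requests grouped below, fetching only "field1" and
// fetching "*" while excluding "field2", are both expected to leave exactly one
// source field, field1, which is why a single shared assertion block suffices.)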
assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource("field1", null), + prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }) + ); + assertResponse(prepareSearch("test").setFetchSource("hello", null), response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + } /** @@ -82,15 +82,13 @@ public void testSourceWithWildcardFiltering() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), + prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java index 980ef2a87c9c2..e5e641bfdda21 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java @@ -109,6 +109,7 @@ public void testRestartNodeDuringSnapshot() throws Exception { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode); + safeAwait((ActionListener<Void> l) -> flushMasterQueue(clusterService, l)); final var snapshotCompletesWithoutPausingListener = ClusterServiceUtils.addTemporaryStateListener(clusterService, state -> { final var entriesForRepo = SnapshotsInProgress.get(state).forRepo(repoName); if (entriesForRepo.isEmpty()) { diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 29c869a9f8d77..35d1a44624b0f 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -423,10 +423,8 @@ org.elasticsearch.action.bulk.BulkFeatures, org.elasticsearch.features.FeatureInfrastructureFeatures, org.elasticsearch.health.HealthFeatures, - 
org.elasticsearch.cluster.service.TransportFeatures, org.elasticsearch.cluster.metadata.MetadataFeatures, org.elasticsearch.rest.RestFeatures, - org.elasticsearch.indices.IndicesFeatures, org.elasticsearch.repositories.RepositoriesFeatures, org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures, diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 32198ba7584be..3c5c365654206 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DocumentParsingException; @@ -611,23 +613,31 @@ protected static void generateThrowableXContent(XContentBuilder builder, Params */ public static XContentBuilder generateFailureXContent(XContentBuilder builder, Params params, @Nullable Exception e, boolean detailed) throws IOException { - // No exception to render as an error + if (builder.getRestApiVersion() == RestApiVersion.V_8) { + if (e == null) { + return builder.field(ERROR, "unknown"); + } + if (detailed == false) { + return generateNonDetailedFailureXContentV8(builder, e); + } + // else fallthrough + } + if (e == null) { - return builder.field(ERROR, "unknown"); + // No exception to render as an error + builder.startObject(ERROR); + builder.field(TYPE, "unknown"); + builder.field(REASON, "unknown"); + return builder.endObject(); } - // Render the exception with a simple message if (detailed == false) { - String message = "No ElasticsearchException found"; - Throwable t = e; - for (int counter = 0; counter < 10 && t != null; counter++) { - if (t instanceof ElasticsearchException) { - message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]"; - break; - } - t = t.getCause(); - } - return builder.field(ERROR, message); + // just render the type & message + Throwable t = ExceptionsHelper.unwrapCause(e); + builder.startObject(ERROR); + builder.field(TYPE, getExceptionName(t)); + builder.field(REASON, t.getMessage()); + return builder.endObject(); } // Render the exception with all details @@ -646,6 +656,20 @@ public static XContentBuilder generateFailureXContent(XContentBuilder builder, P return builder.endObject(); } + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove V8 API + private static XContentBuilder generateNonDetailedFailureXContentV8(XContentBuilder builder, @Nullable Exception e) throws IOException { + String message = "No ElasticsearchException found"; + Throwable t = e; + for (int counter = 0; counter < 10 && t != null; counter++) { + if (t instanceof ElasticsearchException) { + message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]"; + break; + } + t = t.getCause(); + } + return builder.field(ERROR, message); + } + /** * Parses the output of {@link #generateFailureXContent(XContentBuilder, Params, Exception, boolean)} */ @@ -729,8 +753,8 @@ public static String getExceptionName(Throwable ex) { static String 
buildMessage(String type, String reason, String stack) { StringBuilder message = new StringBuilder("Elasticsearch exception ["); - message.append(TYPE).append('=').append(type).append(", "); - message.append(REASON).append('=').append(reason); + message.append(TYPE).append('=').append(type); + message.append(", ").append(REASON).append('=').append(reason); if (stack != null) { message.append(", ").append(STACK_TRACE).append('=').append(stack); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5b5d12d738194..688d2aaf905a6 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -200,6 +200,12 @@ static TransportVersion def(int id) { public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); + public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); + public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); + public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 7791ca200a785..7b65547a7d591 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -189,7 +189,9 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); + public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 9f727f49530a1..98d6284fd91d2 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -853,7 +853,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClusterStateAction(settingsFilter, threadPool)); registerHandler.accept(new RestClusterHealthAction()); registerHandler.accept(new RestClusterUpdateSettingsAction()); - registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, clusterSupportsFeature)); + registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter)); registerHandler.accept(new RestClusterRerouteAction(settingsFilter)); registerHandler.accept(new RestClusterSearchShardsAction()); registerHandler.accept(new RestPendingClusterTasksAction()); diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index bed2815f5a895..a0948af88e2f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.desirednodes.VersionConflictException; -import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodes; import org.elasticsearch.cluster.metadata.DesiredNodesMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -99,22 +98,6 @@ protected void masterOperation( ); } - @Override - protected void doExecute(Task task, UpdateDesiredNodesRequest request, ActionListener<UpdateDesiredNodesResponse> listener) { - if (request.clusterHasRequiredFeatures(nf -> featureService.clusterHasFeature(clusterService.state(), nf)) == false) { - listener.onFailure( - new IllegalArgumentException( - "Unable to use processor ranges, floating-point (with greater precision) processors " + "in mixed-clusters with nodes that do not support feature " + DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED.id() ) ); - return; - } - - super.doExecute(task, request, listener); - } - static ClusterState replaceDesiredNodes(ClusterState clusterState, DesiredNodes newDesiredNodes) { return clusterState.copyAndUpdateMetadata( metadata -> metadata.putCustom(DesiredNodesMetadata.TYPE, new DesiredNodesMetadata(newDesiredNodes)) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index a94401fdd66f3..21b714b105b59 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -26,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.function.Predicate; public class UpdateDesiredNodesRequest extends AcknowledgedRequest<UpdateDesiredNodesRequest> { private static final TransportVersion DRY_RUN_VERSION = TransportVersions.V_8_4_0; @@ -117,11 +115,6 @@ public boolean isDryRun() { return dryRun; } - public boolean clusterHasRequiredFeatures(Predicate<NodeFeature> clusterHasFeature) { - return clusterHasFeature.test(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED) - || nodes.stream().allMatch(n -> n.clusterHasRequiredFeatures(clusterHasFeature)); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java
index e2475bca31d53..afe615add28df 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.indices.SystemIndices;
@@ -56,15 +55,13 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA
     /**
      * Once all feature migrations for 8.x -> 9.x have been tested, we can bump this to Version.V_8_0_0
      */
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0;
-    public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0;
+    public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_8_0_0;
+    public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0;

     private final SystemIndices systemIndices;
     PersistentTasksService persistentTasksService;

     @Inject
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // Once we begin working on 9.x, we need to update our migration classes
     public TransportGetFeatureUpgradeStatusAction(
         TransportService transportService,
         ThreadPool threadPool,
@@ -149,7 +146,6 @@ static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus getFeatureUpgradeSta
             .map(idxInfo -> ERROR)
             .map(idxStatus -> GetFeatureUpgradeStatusResponse.UpgradeStatus.combine(idxStatus, initialStatus))
             .orElse(initialStatus);
-
         return new GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus(featureName, minimumVersion, status, indexInfos);
     }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java
index 83d1356e5ef62..d20eee96809e8 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java
@@ -16,7 +16,7 @@
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.tasks.Task;
@@ -27,8 +27,7 @@
 import java.io.IOException;
 import java.util.List;

-@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-// @UpdateForV10 // this can be removed in v10. It may be called by v8 nodes to v9 nodes.
+@UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // this can be removed in v10. It may be called by v8 nodes to v9 nodes.
 public class TransportNodesFeaturesAction extends TransportNodesAction<
     NodesFeaturesRequest,
     NodesFeaturesResponse,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
index c1f867c247345..5c4be62723e07 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
@@ -793,6 +793,8 @@ static class IndexPressureStats implements ToXContentFragment {
             long currentCoordinatingOps = 0;
             long currentPrimaryOps = 0;
             long currentReplicaOps = 0;
+            long lowWaterMarkSplits = 0;
+            long highWaterMarkSplits = 0;
             for (NodeStats nodeStat : nodeStats) {
                 IndexingPressureStats nodeStatIndexingPressureStats = nodeStat.getIndexingPressureStats();
                 if (nodeStatIndexingPressureStats != null) {
@@ -816,6 +818,8 @@ static class IndexPressureStats implements ToXContentFragment {
                     currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps();
                     primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections();
                     totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests();
+                    lowWaterMarkSplits += nodeStatIndexingPressureStats.getLowWaterMarkSplits();
+                    highWaterMarkSplits += nodeStatIndexingPressureStats.getHighWaterMarkSplits();
                 }
             }
             indexingPressureStats = new IndexingPressureStats(
@@ -838,7 +842,9 @@ static class IndexPressureStats implements ToXContentFragment {
                 currentPrimaryOps,
                 currentReplicaOps,
                 primaryDocumentRejections,
-                totalCoordinatingRequests
+                totalCoordinatingRequests,
+                lowWaterMarkSplits,
+                highWaterMarkSplits
             );
         }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
index d8db2c5e657b4..ce9b48666d6ed 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
@@ -48,9 +48,10 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId,
         return new CancellableTask(id, type, action, "", parentTaskId, headers);
     }

-    public ClusterStatsRequest asRemoteStats() {
-        this.remoteStats = true;
-        return this;
+    public static ClusterStatsRequest newRemoteClusterStatsRequest() {
+        final var request = new ClusterStatsRequest();
+        request.remoteStats = true;
+        return request;
     }

     /**
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
index 737e83d4b30a1..07d9c11ae4c07 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
@@ -74,6 +74,7 @@ public static LongMetricValue fromStream(StreamInput in) throws IOException {
         try {
             // TODO: not sure what is the good value for minBarForHighestToLowestValueRatio here?
             Histogram dh = Histogram.decodeFromCompressedByteBuffer(bb, 1);
+            dh.setAutoResize(true);
             return new LongMetricValue(dh);
         } catch (DataFormatException e) {
             throw new IOException(e);
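The setAutoResize(true) line above addresses a subtlety of HdrHistogram decoding: decodeFromCompressedByteBuffer(bb, 1) passes 1 as minBarForHighestToLowestValueRatio, so the decoded histogram only covers the range of values it was serialized with, and recording a larger value into it later (for example when merging stats from another node) would otherwise fail. A small self-contained illustration of the difference, assuming the org.HdrHistogram dependency:

    import org.HdrHistogram.Histogram;

    public class AutoResizeDemo {
        public static void main(String[] args) {
            // Fixed-range histogram: values above highestTrackableValue throw.
            Histogram fixed = new Histogram(1, 1_000, 3);
            // fixed.recordValue(5_000); // ArrayIndexOutOfBoundsException

            // An auto-resizing histogram grows its tracked range on demand,
            // which is what the decoded histogram now does after this fix.
            Histogram resizable = new Histogram(1, 1_000, 3);
            resizable.setAutoResize(true);
            resizable.recordValue(5_000); // ok
            System.out.println(resizable.getMaxValue());
        }
    }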
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index 36b018b5002eb..97585ea9a1024 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -27,6 +27,7 @@
 import org.elasticsearch.action.support.RefCountingListener;
 import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.action.support.nodes.TransportNodesAction;
+import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterSnapshotStats;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
@@ -108,20 +109,19 @@ public class TransportClusterStatsAction extends TransportNodesAction<
     private final MetadataStatsCache<MappingStats> mappingStatsCache;
     private final MetadataStatsCache<AnalysisStats> analysisStatsCache;
     private final RemoteClusterService remoteClusterService;
-    private final TransportRemoteClusterStatsAction remoteClusterStatsAction;

     @Inject
     public TransportClusterStatsAction(
         ThreadPool threadPool,
         ClusterService clusterService,
         TransportService transportService,
+        Client client,
         NodeService nodeService,
         IndicesService indicesService,
         RepositoriesService repositoriesService,
         UsageService usageService,
         ActionFilters actionFilters,
-        Settings settings,
-        TransportRemoteClusterStatsAction remoteClusterStatsAction
+        Settings settings
     ) {
         super(
             TYPE.name(),
@@ -141,7 +141,9 @@ public TransportClusterStatsAction(
         this.analysisStatsCache = new MetadataStatsCache<>(threadPool.getThreadContext(), AnalysisStats::of);
         this.remoteClusterService = transportService.getRemoteClusterService();
         this.settings = settings;
-        this.remoteClusterStatsAction = remoteClusterStatsAction;
+
+        // register remote-cluster action with transport service only and not as a local-node Action that the Client can invoke
+        new TransportRemoteClusterStatsAction(client, transportService, actionFilters);
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java
index 4d57f10807af6..882aaa8b18e15 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java
@@ -10,11 +10,11 @@
 package org.elasticsearch.action.admin.cluster.stats;

 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.RemoteClusterActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.client.internal.ParentTaskAssigningClient;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.tasks.Task;
@@ -27,26 +27,26 @@ public class TransportRemoteClusterStatsAction extends HandledTransportAction<RemoteClusterStatsRequest, RemoteClusterStatsResponse> {

     public static final String NAME = "cluster:monitor/stats/remote";
-    public static final ActionType<RemoteClusterStatsResponse> TYPE = new ActionType<>(NAME);
     public static final RemoteClusterActionType<RemoteClusterStatsResponse> REMOTE_TYPE = new RemoteClusterActionType<>(
         NAME,
         RemoteClusterStatsResponse::new
     );
-    private final NodeClient client;
+
+    private final Client client;
+    private final TransportService transportService;

     @Inject
-    public TransportRemoteClusterStatsAction(NodeClient client, TransportService transportService, ActionFilters actionFilters) {
+    public TransportRemoteClusterStatsAction(Client client, TransportService transportService, ActionFilters actionFilters) {
         super(NAME, transportService, actionFilters, RemoteClusterStatsRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
         this.client = client;
+        this.transportService = transportService;
     }

     @Override
     protected void doExecute(Task task, RemoteClusterStatsRequest request, ActionListener<RemoteClusterStatsResponse> listener) {
-        ClusterStatsRequest subRequest = new ClusterStatsRequest().asRemoteStats();
-        subRequest.setParentTask(request.getParentTask());
-        client.execute(
+        new ParentTaskAssigningClient(client, transportService.getLocalNode(), task).execute(
             TransportClusterStatsAction.TYPE,
-            subRequest,
+            ClusterStatsRequest.newRemoteClusterStatsRequest(),
             listener.map(
                 clusterStatsResponse -> new RemoteClusterStatsResponse(
                     clusterStatsResponse.getClusterUUID(),
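The rewritten doExecute above also shows the intended pattern for internal sub-requests: instead of manually copying the parent task id onto the sub-request (the old subRequest.setParentTask(...) call), the Client is wrapped in a ParentTaskAssigningClient, so every request sent through it is automatically registered as a child of the incoming task and participates in cancellation. Sketched in isolation (someClient and the listener bodies are illustrative, not from this diff):

    // Any request sent via childClient carries the parent task id.
    Client childClient = new ParentTaskAssigningClient(someClient, transportService.getLocalNode(), task);
    childClient.execute(
        TransportClusterStatsAction.TYPE,
        ClusterStatsRequest.newRemoteClusterStatsRequest(),
        ActionListener.wrap(response -> { /* use stats */ }, failure -> { /* handle */ })
    );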
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
index 7857e9a22e9b9..cb667400240f0 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.injection.guice.Inject;
@@ -120,27 +119,18 @@ public void onPrimaryOperationComplete(
             ActionListener<Void> listener
         ) {
             assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed";
-            boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(
-                clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings()
+            UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest(
+                indexShardRoutingTable,
+                replicaRequest.primaryRefreshResult.primaryTerm(),
+                replicaRequest.primaryRefreshResult.generation(),
+                false
+            );
+            transportService.sendRequest(
+                transportService.getLocalNode(),
+                TransportUnpromotableShardRefreshAction.NAME,
+                unpromotableReplicaRequest,
+                new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor)
             );
-
-            // Indices marked with fast refresh do not rely on refreshing the unpromotables
-            if (fastRefresh) {
-                listener.onResponse(null);
-            } else {
-                UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest(
-                    indexShardRoutingTable,
-                    replicaRequest.primaryRefreshResult.primaryTerm(),
-                    replicaRequest.primaryRefreshResult.generation(),
-                    false
-                );
-                transportService.sendRequest(
-                    transportService.getLocalNode(),
-                    TransportUnpromotableShardRefreshAction.NAME,
-                    unpromotableReplicaRequest,
-                    new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor)
-                );
-            }
         }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
index 6c24ec2d17604..4458c008babcd 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
@@ -24,6 +24,9 @@

 import java.util.List;

+import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2;
+import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
+
 public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction<
     UnpromotableShardRefreshRequest,
     ActionResponse.Empty> {
@@ -73,6 +76,18 @@ protected void unpromotableShardOperation(
             return;
         }

+        // During an upgrade to FAST_REFRESH_RCO_2, we expect search shards to be first upgraded before the primary is upgraded. Thus,
+        // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already.
+        // Note that the fast refresh setting is final.
+        // TODO: remove assertion (ES-9563)
+        assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false
+            || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)
+            : "attempted to refresh a fast refresh search shard "
+                + shard
+                + " on transport version "
+                + transportService.getLocalNodeConnection().getTransportVersion()
+                + " (before FAST_REFRESH_RCO_2)";
+
         ActionListener.run(responseListener, listener -> {
             shard.waitForPrimaryTermAndGeneration(
                 request.getPrimaryTerm(),
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
index 7cefc086e17dc..5bdecd10075e6 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
@@ -22,7 +22,8 @@
 public class IndexStats implements Iterable<IndexShardStats> {

-    public static final NodeFeature TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date");
+    // feature was effectively reverted but we still need to keep this constant around
+    public static final NodeFeature REVERTED_TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date");

     private final String index;

@@ -32,10 +33,6 @@ public class IndexStats implements Iterable<IndexShardStats> {

     private final IndexMetadata.State state;

-    private final List<String> tierPreference;
-
-    private final Long creationDate;
-
     private final ShardStats shards[];

     public IndexStats(
@@ -43,16 +40,12 @@ public IndexStats(
         String uuid,
         @Nullable ClusterHealthStatus health,
         @Nullable IndexMetadata.State state,
-        @Nullable List<String> tierPreference,
-        @Nullable Long creationDate,
         ShardStats[] shards
     ) {
         this.index = index;
         this.uuid = uuid;
         this.health = health;
         this.state = state;
-        this.tierPreference = tierPreference;
-        this.creationDate = creationDate;
         this.shards = shards;
     }

@@ -72,14 +65,6 @@ public IndexMetadata.State getState() {
         return state;
     }

-    public List<String> getTierPreference() {
-        return tierPreference;
-    }
-
-    public Long getCreationDate() {
-        return creationDate;
-    }
-
     public ShardStats[] getShards() {
         return this.shards;
     }
@@ -148,24 +133,13 @@ public static class IndexStatsBuilder {
         private final String uuid;
         private final ClusterHealthStatus health;
         private final IndexMetadata.State state;
-        private final List<String> tierPreference;
-        private final Long creationDate;
         private final List<ShardStats> shards = new ArrayList<>();

-        public IndexStatsBuilder(
-            String indexName,
-            String uuid,
-            @Nullable ClusterHealthStatus health,
-            @Nullable IndexMetadata.State state,
-            @Nullable List<String> tierPreference,
-            @Nullable Long creationDate
-        ) {
+        public IndexStatsBuilder(String indexName, String uuid, @Nullable ClusterHealthStatus health, @Nullable IndexMetadata.State state) {
             this.indexName = indexName;
             this.uuid = uuid;
             this.health = health;
             this.state = state;
-            this.tierPreference = tierPreference;
-            this.creationDate = creationDate;
         }

         public IndexStatsBuilder add(ShardStats shardStats) {
@@ -174,15 +148,7 @@ public IndexStatsBuilder add(ShardStats shardStats) {
         }

         public IndexStats build() {
-            return new IndexStats(
-                indexName,
-                uuid,
-                health,
-                state,
-                tierPreference,
-                creationDate,
-                shards.toArray(new ShardStats[shards.size()])
-            );
+            return new IndexStats(indexName, uuid, health, state, shards.toArray(new ShardStats[shards.size()]));
         }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java
index 2b67885273d05..558343db1023a 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java
@@ -18,6 +18,6 @@ public class IndicesStatsFeatures implements FeatureSpecification {

     @Override
     public Set<NodeFeature> getFeatures() {
-        return Set.of(IndexStats.TIER_CREATION_DATE);
+        return Set.of(IndexStats.REVERTED_TIER_CREATION_DATE);
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
index 205f1cbc04e8b..91e0e7cbc1dff 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
@@ -47,10 +47,6 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {

     private final Map<String, IndexMetadata.State> indexStateMap;

-    private final Map<String, List<String>> indexTierPreferenceMap;
-
-    private final Map<String, Long> indexCreationDateMap;
-
     private final ShardStats[] shards;

     private Map<ShardRouting, ShardStats> shardStatsMap;
@@ -58,23 +54,22 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {
     IndicesStatsResponse(StreamInput in) throws IOException {
         super(in);
         shards = in.readArray(ShardStats::new, ShardStats[]::new);
-        if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) {
             indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);
             indexStateMap = in.readMap(IndexMetadata.State::readFrom);
-            indexTierPreferenceMap = in.readMap(StreamInput::readStringCollectionAsList);
-            indexCreationDateMap = in.readMap(StreamInput::readLong);
+        } else if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
+            indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);
+            indexStateMap = in.readMap(IndexMetadata.State::readFrom);
+            in.readMap(StreamInput::readStringCollectionAsList); // unused, reverted
+            in.readMap(StreamInput::readLong); // unused, reverted
         } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
             // Between 8.1 and INDEX_STATS_ADDITIONAL_FIELDS, we had a different format for the response
             // where we only had health and state available.
             indexHealthMap = in.readMap(ClusterHealthStatus::readFrom);
             indexStateMap = in.readMap(IndexMetadata.State::readFrom);
-            indexTierPreferenceMap = Map.of();
-            indexCreationDateMap = Map.of();
         } else {
             indexHealthMap = Map.of();
             indexStateMap = Map.of();
-            indexTierPreferenceMap = Map.of();
-            indexCreationDateMap = Map.of();
         }
     }
@@ -94,8 +89,6 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {
         Objects.requireNonNull(shards);
         Map<String, ClusterHealthStatus> indexHealthModifiableMap = new HashMap<>();
         Map<String, IndexMetadata.State> indexStateModifiableMap = new HashMap<>();
-        Map<String, List<String>> indexTierPreferenceModifiableMap = new HashMap<>();
-        Map<String, Long> indexCreationDateModifiableMap = new HashMap<>();
         for (ShardStats shard : shards) {
             Index index = shard.getShardRouting().index();
             IndexMetadata indexMetadata = metadata.index(index);
@@ -105,14 +98,10 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse {
                     ignored -> new ClusterIndexHealth(indexMetadata, routingTable.index(index)).getStatus()
                 );
                 indexStateModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getState());
-                indexTierPreferenceModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getTierPreference());
-                indexCreationDateModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getCreationDate());
             }
         }
         indexHealthMap = unmodifiableMap(indexHealthModifiableMap);
         indexStateMap = unmodifiableMap(indexStateModifiableMap);
-        indexTierPreferenceMap = unmodifiableMap(indexTierPreferenceModifiableMap);
-        indexCreationDateMap = unmodifiableMap(indexCreationDateModifiableMap);
     }

     public Map<ShardRouting, ShardStats> asMap() {
@@ -150,14 +139,7 @@ public Map<String, IndexStats> getIndices() {
             Index index = shard.getShardRouting().index();
             IndexStatsBuilder indexStatsBuilder = indexToIndexStatsBuilder.computeIfAbsent(
                 index.getName(),
-                k -> new IndexStatsBuilder(
-                    k,
-                    index.getUUID(),
-                    indexHealthMap.get(index.getName()),
-                    indexStateMap.get(index.getName()),
-                    indexTierPreferenceMap.get(index.getName()),
-                    indexCreationDateMap.get(index.getName())
-                )
+                k -> new IndexStatsBuilder(k, index.getUUID(), indexHealthMap.get(index.getName()), indexStateMap.get(index.getName()))
             );
             indexStatsBuilder.add(shard);
         }
@@ -202,12 +184,14 @@ public CommonStats getPrimaries() {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeArray(shards);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) {
             out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
             out.writeMap(indexStateMap, StreamOutput::writeWriteable);
-            out.writeMap(indexTierPreferenceMap, StreamOutput::writeStringCollection);
-            out.writeMap(indexCreationDateMap, StreamOutput::writeLong);
-
+        } else if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) {
+            out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
+            out.writeMap(indexStateMap, StreamOutput::writeWriteable);
+            out.writeMap(Map.of(), StreamOutput::writeStringCollection);
+            out.writeMap(Map.of(), StreamOutput::writeLong);
         } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
             out.writeMap(indexHealthMap, StreamOutput::writeWriteable);
             out.writeMap(indexStateMap, StreamOutput::writeWriteable);
@@ -237,12 +221,6 @@ protected Iterator<ToXContent> customXContentChunks(ToXContent.Params params) {
                 if (indexStats.getState() != null) {
                     builder.field("status", indexStats.getState().toString().toLowerCase(Locale.ROOT));
                 }
-                if (indexStats.getTierPreference() != null) {
-                    builder.field("tier_preference", indexStats.getTierPreference());
-                }
-                if (indexStats.getCreationDate() != null) {
-                    builder.field("creation_date", indexStats.getCreationDate());
-                }
                 builder.startObject("primaries");
                 indexStats.getPrimaries().toXContent(builder, p);
                 builder.endObject();
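The three-branch read/write code above is the standard recipe for reverting a wire change that has already been released: peers on or after INDEX_STATS_ADDITIONAL_FIELDS_REVERT use the restored (smaller) format, but peers in the window between INDEX_STATS_ADDITIONAL_FIELDS and the revert still expect the extra maps, so the writer sends empty placeholders and the reader consumes and discards them. A condensed sketch of the pattern with abbreviated, illustrative names (OLD, FEATURE, REVERT stand for the three TransportVersions; writeBaseFields is hypothetical):

    // Hedged sketch, not the literal code above: the extras only ever shipped in the FEATURE window.
    public void writeTo(StreamOutput out) throws IOException {
        if (out.getTransportVersion().onOrAfter(REVERT)) {
            writeBaseFields(out);                                        // reverted format, no extras
        } else if (out.getTransportVersion().onOrAfter(FEATURE)) {
            writeBaseFields(out);
            out.writeMap(Map.of(), StreamOutput::writeStringCollection); // placeholders the peer still expects
            out.writeMap(Map.of(), StreamOutput::writeLong);
        } else {
            writeBaseFields(out);                                        // pre-FEATURE format
        }
    }

Keeping all three branches until the FEATURE-only versions fall out of the supported wire-compatibility window is what makes the revert safe.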
diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java
new file mode 100644
index 0000000000000..814c512c43bec
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.action.datastreams;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.ActionType;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class ReindexDataStreamAction extends ActionType<ReindexDataStreamAction.ReindexDataStreamResponse> {
+
+    public static final ReindexDataStreamAction INSTANCE = new ReindexDataStreamAction();
+    public static final String NAME = "indices:admin/data_stream/reindex";
+
+    public ReindexDataStreamAction() {
+        super(NAME);
+    }
+
+    public static class ReindexDataStreamResponse extends ActionResponse implements ToXContentObject {
+        private final String taskId;
+
+        public ReindexDataStreamResponse(String taskId) {
+            super();
+            this.taskId = taskId;
+        }
+
+        public ReindexDataStreamResponse(StreamInput in) throws IOException {
+            super(in);
+            this.taskId = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(taskId);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field("task", getTaskId());
+            builder.endObject();
+            return builder;
+        }
+
+        public String getTaskId() {
+            return taskId;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hashCode(taskId);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return other instanceof ReindexDataStreamResponse && taskId.equals(((ReindexDataStreamResponse) other).taskId);
+        }
+
+    }
+
+    public static class ReindexDataStreamRequest extends ActionRequest {
+        private final String sourceDataStream;
+
+        public ReindexDataStreamRequest(String sourceDataStream) {
+            super();
+            this.sourceDataStream = sourceDataStream;
+        }
+
+        public ReindexDataStreamRequest(StreamInput in) throws IOException {
+            super(in);
+            this.sourceDataStream = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeString(sourceDataStream);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public boolean getShouldStoreResult() {
+            return true; // do not wait_for_completion
+        }
+
+        public String getSourceDataStream() {
+            return sourceDataStream;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hashCode(sourceDataStream);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return other instanceof ReindexDataStreamRequest
+                && sourceDataStream.equals(((ReindexDataStreamRequest) other).sourceDataStream);
+        }
+    }
+}
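One detail of the new request worth calling out: getShouldStoreResult() returning true tells the task framework to persist the task result instead of requiring the caller to hold a connection open, which is why the response above carries only a task id. A hedged sketch of how a caller might submit the action (the data stream name and the logging are illustrative, not from this diff):

    client.execute(
        ReindexDataStreamAction.INSTANCE,
        new ReindexDataStreamAction.ReindexDataStreamRequest("my-data-stream"),
        ActionListener.wrap(
            response -> logger.info("reindex started as task [{}]", response.getTaskId()),
            e -> logger.error("failed to start data stream reindex", e)
        )
    );

The returned task id can then be polled through the tasks APIs until the stored result is available.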
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
index 9e535344c9589..fb4b3907d2bfd 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
@@ -126,12 +126,10 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi
         IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
         IndexShard indexShard = indexService.getShard(shardId.id());
         if (indexShard.routingEntry().isPromotableToPrimary() == false) {
-            // TODO: Re-evaluate assertion (ES-8227)
-            // assert indexShard.indexSettings().isFastRefresh() == false
-            //     : "a search shard should not receive a TransportGetAction for an index with fast refresh";
             handleGetOnUnpromotableShard(request, indexShard, listener);
             return;
         }
+        // TODO: adapt assertion to assert only that it is not stateless (ES-9563)
         assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh()
             : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting";
         if (request.realtime()) { // we are not tied to a refresh cycle here anyway
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
index 34b3ae50e0b51..93e1b18ec64c6 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.replication.BasicReplicationRequest;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -124,12 +125,10 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId
         IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
         IndexShard indexShard = indexService.getShard(shardId.id());
         if (indexShard.routingEntry().isPromotableToPrimary() == false) {
-            // TODO: Re-evaluate assertion (ES-8227)
-            // assert indexShard.indexSettings().isFastRefresh() == false
-            //     : "a search shard should not receive a TransportShardMultiGetAction for an index with fast refresh";
             handleMultiGetOnUnpromotableShard(request, indexShard, listener);
             return;
         }
+        // TODO: adapt assertion to assert only that it is not stateless (ES-9563)
         assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh()
             : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has "
                 + "the fast refresh setting";
@@ -282,15 +281,15 @@ private void tryShardMultiGetFromTranslog(
             } else {
                 assert r.segmentGeneration() > -1L;
                 assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM;
-                indexShard.waitForPrimaryTermAndGeneration(
-                    r.primaryTerm(),
-                    r.segmentGeneration(),
+                final ActionListener<Long> termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext(
                     listener.delegateFailureAndWrap(
                         (ll, aLong) -> getExecutor(request, shardId).execute(
                             ActionRunnable.supply(ll, () -> handleLocalGets(request, r.multiGetShardResponse(), shardId))
                         )
-                    )
+                    ),
+                    threadPool.getThreadContext()
                 );
+                indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener);
             }
         }
     }), TransportShardMultiGetFomTranslogAction.Response::new, getExecutor(request, shardId))
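The ContextPreservingActionListener change above is about thread-context hygiene: waitForPrimaryTermAndGeneration may complete on a different thread, and without the wrapper the callback would run with whatever ThreadContext that thread happens to carry, rather than the headers captured when the request was accepted. The general pattern, sketched with an illustrative listener:

    // Capture the current ThreadContext now; restore it around the callbacks later.
    ActionListener<Long> safeListener = ContextPreservingActionListener.wrapPreservingContext(
        ActionListener.wrap(
            generation -> { /* runs under the captured context */ },
            e -> { /* failure path sees the same context */ }
        ),
        threadPool.getThreadContext()
    );
    indexShard.waitForPrimaryTermAndGeneration(primaryTerm, generation, safeListener);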
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java
index 3ed1dfef50053..760b87af49a78 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java
@@ -80,7 +80,7 @@ public RestStatus status() {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         for (PipelineConfiguration pipeline : pipelines) {
-            builder.field(pipeline.getId(), summary ? Map.of() : pipeline.getConfigAsMap());
+            builder.field(pipeline.getId(), summary ? Map.of() : pipeline.getConfig());
         }
         builder.endObject();
         return builder;
diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index c051f0ca7a6f5..09fb70fb06ba4 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -299,7 +299,7 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat
     }

     private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) {
-        executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) {
+        var shardListener = new SearchActionListener<Result>(shard, shardIndex) {
             @Override
             public void innerOnResponse(Result result) {
                 try {
@@ -315,7 +315,15 @@ public void onFailure(Exception e) {
                 releasable.close();
                 onShardFailure(shardIndex, shard, shardIt, e);
             }
-        });
+        };
+        final Transport.Connection connection;
+        try {
+            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
+        } catch (Exception e) {
+            shardListener.onFailure(e);
+            return;
+        }
+        executePhaseOnShard(shardIt, connection, shardListener);
     }

     private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
@@ -327,12 +335,12 @@ private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
     /**
      * Sends the request to the actual shard.
      * @param shardIt the shards iterator
-     * @param shard the shard routing to send the request for
+     * @param connection to node that the shard is located on
      * @param listener the listener to notify on response
      */
     protected abstract void executePhaseOnShard(
         SearchShardIterator shardIt,
-        SearchShardTarget shard,
+        Transport.Connection connection,
         SearchActionListener<Result> listener
     );
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 69ca1569a7c07..25d59a06664da 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -84,16 +84,9 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
     @Override
     protected void executePhaseOnShard(
         final SearchShardIterator shardIt,
-        final SearchShardTarget shard,
+        final Transport.Connection connection,
         final SearchActionListener<DfsSearchResult> listener
     ) {
-        final Transport.Connection connection;
-        try {
-            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
-        } catch (Exception e) {
-            listener.onFailure(e);
-            return;
-        }
         getSearchTransport().sendExecuteDfs(connection, buildShardSearchRequest(shardIt, listener.requestIndex), getTask(), listener);
     }
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
index d91ea85e2fa97..986f7210c0d1b 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
@@ -79,7 +79,7 @@ protected static void doCheckNoMissingShards(
     /**
      * Releases shard targets that are not used in the docsIdsToLoad.
     */
-    protected void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction<?> context) {
+    protected static void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction<?> context) {
         // we only release search context that we did not fetch from, if we are not scrolling
         // or using a PIT and if it has at least one hit that didn't make it to the global topDocs
         if (searchPhaseResult == null) {
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index 84e0e2adea612..f75b84abc2f0f 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -91,16 +91,9 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<SearchPh
     @Override
     protected void executePhaseOnShard(
         final SearchShardIterator shardIt,
-        final SearchShardTarget shard,
+        final Transport.Connection connection,
         final SearchActionListener<SearchPhaseResult> listener
     ) {
-        final Transport.Connection connection;
-        try {
-            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
-        } catch (Exception e) {
-            listener.onFailure(e);
-            return;
-        }
         ShardSearchRequest request = rewriteShardSearchRequest(super.buildShardSearchRequest(shardIt, listener.requestIndex));
         getSearchTransport().sendExecuteQuery(connection, request, getTask(), listener);
     }
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
index 7ba4a7ce59869..9e60eedbad6a2 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchService;
-import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
 import org.elasticsearch.search.internal.ShardSearchContextId;
@@ -252,16 +251,9 @@ protected String missingShardsErrorMessage(StringBuilder missingShards) {
         @Override
         protected void executePhaseOnShard(
             SearchShardIterator shardIt,
-            SearchShardTarget shard,
+            Transport.Connection connection,
             SearchActionListener<SearchPhaseResult> phaseListener
         ) {
-            final Transport.Connection connection;
-            try {
-                connection = connectionLookup.apply(shardIt.getClusterAlias(), shard.getNodeId());
-            } catch (Exception e) {
-                phaseListener.onFailure(e);
-                return;
-            }
             transportService.sendChildRequest(
                 connection,
                 OPEN_SHARD_READER_CONTEXT_NAME,
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java
index 683c3589c893d..7414aeeb2c405 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.translog.Translog;
@@ -53,9 +52,7 @@ public void refreshShard(
         case WAIT_UNTIL -> waitUntil(indexShard, location, new ActionListener<>() {
             @Override
             public void onResponse(Boolean forced) {
-                // Fast refresh indices do not depend on the unpromotables being refreshed
-                boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings());
-                if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) {
+                if (location != null && indexShard.routingEntry().isSearchable() == false) {
                     refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout);
                 } else {
                     listener.onResponse(forced);
@@ -68,9 +65,7 @@ public void onFailure(Exception e) {
             }
         });
         case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> {
-            // Fast refresh indices do not depend on the unpromotables being refreshed
-            boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings());
-            if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) {
+            if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) {
                 sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout);
             } else {
                 l.onResponse(true);
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index 699198a8e22c2..56d185645e149 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -17,6 +17,7 @@
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.node.NodeValidationException;
+import org.elasticsearch.plugins.PluginsLoader;

 import java.io.PrintStream;

@@ -42,6 +43,9 @@ class Bootstrap {
     // the loaded settings for the node, not valid until after phase 2 of initialization
     private final SetOnce<Environment> nodeEnv = new SetOnce<>();

+    // loads information about plugins required for entitlements in phase 2, used by plugins service in phase 3
+    private final SetOnce<PluginsLoader> pluginsLoader = new SetOnce<>();
+
     Bootstrap(PrintStream out, PrintStream err, ServerArgs args) {
         this.out = out;
         this.err = err;
@@ -72,6 +76,14 @@ Environment environment() {
         return nodeEnv.get();
     }

+    void setPluginsLoader(PluginsLoader pluginsLoader) {
+        this.pluginsLoader.set(pluginsLoader);
+    }
+
+    PluginsLoader pluginsLoader() {
+        return pluginsLoader.get();
+    }
+
     void exitWithNodeValidationException(NodeValidationException e) {
         Logger logger = LogManager.getLogger(Elasticsearch.class);
         logger.error("node validation exception\n{}", e.getMessage());
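The new pluginsLoader field follows the same SetOnce discipline Bootstrap already uses for nodeEnv: phase 2 may assign the value exactly once and phase 3 reads it, so an accidental double initialization fails loudly instead of racing. A minimal illustration, assuming Lucene's org.apache.lucene.util.SetOnce (the type used for these fields):

    import org.apache.lucene.util.SetOnce;

    SetOnce<String> slot = new SetOnce<>();
    slot.set("phase-2 value");   // first set wins
    String v = slot.get();       // "phase-2 value"
    // slot.set("again");        // would throw SetOnce.AlreadySetException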
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index 2a83f749e7d33..95e5b00a2805f 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.core.AbstractRefCounted;
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexVersion;
@@ -41,6 +42,9 @@
 import org.elasticsearch.nativeaccess.NativeAccess;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeValidationException;
+import org.elasticsearch.plugins.PluginBundle;
+import org.elasticsearch.plugins.PluginsLoader;
+import org.elasticsearch.plugins.PluginsUtils;

 import java.io.IOException;
 import java.io.InputStream;
@@ -50,8 +54,10 @@
 import java.nio.file.Path;
 import java.security.Permission;
 import java.security.Security;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;

@@ -199,9 +205,24 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
             VectorUtil.class
         );

+        // load the plugin Java modules and layers now for use in entitlements
+        var pluginsLoader = new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile());
+        bootstrap.setPluginsLoader(pluginsLoader);
+
         if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) {
             logger.info("Bootstrapping Entitlements");
-            EntitlementBootstrap.bootstrap();
+
+            List<Tuple<Path, Boolean>> pluginData = new ArrayList<>();
+            Set<PluginBundle> moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile());
+            for (PluginBundle moduleBundle : moduleBundles) {
+                pluginData.add(Tuple.tuple(moduleBundle.getDir(), moduleBundle.pluginDescriptor().isModular()));
+            }
+            Set<PluginBundle> pluginBundles = PluginsUtils.getPluginBundles(nodeEnv.pluginsFile());
+            for (PluginBundle pluginBundle : pluginBundles) {
+                pluginData.add(Tuple.tuple(pluginBundle.getDir(), pluginBundle.pluginDescriptor().isModular()));
+            }
+            // TODO: add a functor to map module to plugin name
+            EntitlementBootstrap.bootstrap(pluginData, callerClass -> null);
         } else {
             // install SM after natives, shutdown hooks, etc.
             logger.info("Bootstrapping java SecurityManager");
@@ -244,7 +265,7 @@ private static void ensureInitialized(Class<?>... classes) {
     private static void initPhase3(Bootstrap bootstrap) throws IOException, NodeValidationException {
         checkLucene();

-        Node node = new Node(bootstrap.environment()) {
+        Node node = new Node(bootstrap.environment(), bootstrap.pluginsLoader()) {
             @Override
             protected void validateNodeBeforeAcceptingRequests(
                 final BootstrapContext context,
diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java
index 57b90454c7e8b..ad285cbd391cd 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java
@@ -95,7 +95,7 @@ public Set<NodeFeature> allNodeFeatures() {
     /**
      * {@code true} if {@code feature} is present on all nodes in the cluster.
      * <p>
-     * NOTE: This should not be used directly, as it does not read historical features.
+     * NOTE: This should not be used directly.
      * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
      */
     @SuppressForbidden(reason = "directly reading cluster features")
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
index 8a4464f194fc5..35b6eb1852237 100644
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
@@ -246,6 +246,7 @@ public Coordinator(
         this.joinValidationService = new JoinValidationService(
             settings,
             transportService,
+            namedWriteableRegistry,
             this::getStateForJoinValidationService,
             () -> getLastAcceptedState().metadata(),
             this.onJoinValidators
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java
index 7de7fd4d92d1b..9d5d74fa24648 100644
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java
@@ -21,6 +21,8 @@
 import org.elasticsearch.common.bytes.ReleasableBytesReference;
 import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
@@ -106,6 +108,7 @@ public class JoinValidationService {
     public JoinValidationService(
         Settings settings,
         TransportService transportService,
+        NamedWriteableRegistry namedWriteableRegistry,
         Supplier<ClusterState> clusterStateSupplier,
         Supplier<Metadata> metadataSupplier,
         Collection<BiConsumer<DiscoveryNode, ClusterState>> joinValidators
@@ -120,9 +123,9 @@ public JoinValidationService(
         transportService.registerRequestHandler(
             JoinValidationService.JOIN_VALIDATE_ACTION_NAME,
             this.responseExecutor,
-            ValidateJoinRequest::new,
+            BytesTransportRequest::new,
             (request, channel, task) -> {
-                final var remoteState = request.getOrReadState();
+                final var remoteState = readClusterState(namedWriteableRegistry, request);
                 final var remoteMetadata = remoteState.metadata();
                 final var localMetadata = metadataSupplier.get();
                 if (localMetadata.clusterUUIDCommitted() && localMetadata.clusterUUID().equals(remoteMetadata.clusterUUID()) == false) {
@@ -145,6 +148,20 @@ public JoinValidationService(
         );
     }

+    private static ClusterState readClusterState(NamedWriteableRegistry namedWriteableRegistry, BytesTransportRequest request)
+        throws IOException {
+        try (
+            var bytesStreamInput = request.bytes().streamInput();
+            var in = new NamedWriteableAwareStreamInput(
+                CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput),
+                namedWriteableRegistry
+            )
+        ) {
+            in.setTransportVersion(request.version());
+            return ClusterState.readFrom(in, null);
+        }
+    }
+
     public void validateJoin(DiscoveryNode discoveryNode, ActionListener<Void> listener) {
         // This node isn't in the cluster yet so ClusterState#getMinTransportVersion() doesn't apply, we must obtain a specific connection
         // so we can check its transport version to decide how to proceed.
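readClusterState above is the receiving half of the join-validation exchange: the master ships the cluster state as compressed bytes (a BytesTransportRequest), and the joining node decompresses the stream, wraps it so named writeables resolve against its local registry, pins the sender's transport version, and only then materializes the ClusterState. For orientation, a hedged sketch of the matching send side, assuming the same CompressorFactory.COMPRESSOR and a known target transport version (the equivalent logic already exists elsewhere in JoinValidationService; variable names here are illustrative):

    // Serialize a ClusterState into compressed bytes for the wire.
    BytesStreamOutput bytesStream = new BytesStreamOutput();
    try (StreamOutput out = new OutputStreamStreamOutput(
            CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream)))) {
        out.setTransportVersion(targetTransportVersion); // must match what the reader pins via request.version()
        clusterState.writeTo(out);
    }
    BytesReference bytes = bytesStream.bytes(); // wrapped into a BytesTransportRequest by the real code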
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java
deleted file mode 100644
index c81e4877196b3..0000000000000
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-package org.elasticsearch.cluster.coordination;
-
-import org.elasticsearch.TransportVersion;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.common.CheckedSupplier;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.compress.CompressorFactory;
-import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.core.RefCounted;
-import org.elasticsearch.transport.TransportRequest;
-
-import java.io.IOException;
-
-public class ValidateJoinRequest extends TransportRequest {
-    private final CheckedSupplier<ClusterState, IOException> stateSupplier;
-    private final RefCounted refCounted;
-
-    public ValidateJoinRequest(StreamInput in) throws IOException {
-        super(in);
-        // recent versions send a BytesTransportRequest containing a compressed representation of the state
-        final var bytes = in.readReleasableBytesReference();
-        final var version = in.getTransportVersion();
-        final var namedWriteableRegistry = in.namedWriteableRegistry();
-        this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry);
-        this.refCounted = bytes;
-    }
-
-    private static ClusterState readCompressed(
-        TransportVersion version,
-        BytesReference bytes,
-        NamedWriteableRegistry namedWriteableRegistry
-    ) throws IOException {
-        try (
-            var bytesStreamInput = bytes.streamInput();
-            var in = new NamedWriteableAwareStreamInput(
-                CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput),
-                namedWriteableRegistry
-            )
-        ) {
-            in.setTransportVersion(version);
-            return ClusterState.readFrom(in, null);
-        }
-    }
-
-    public ValidateJoinRequest(ClusterState state) {
-        this.stateSupplier = () -> state;
-        this.refCounted = null;
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        stateSupplier.get().writeTo(out);
-    }
-
-    public ClusterState getOrReadState() throws IOException {
-        return stateSupplier.get();
-    }
-
-    @Override
-    public void incRef() {
-        if (refCounted != null) {
-            refCounted.incRef();
-        }
-    }
-
-    @Override
-    public boolean tryIncRef() {
-        return refCounted == null || refCounted.tryIncRef();
-    }
-
-    @Override
-    public boolean decRef() {
-        return refCounted != null && refCounted.decRef();
-    }
-
-    @Override
-    public boolean hasReferences() {
-        return refCounted == null || refCounted.hasReferences();
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java b/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java
deleted file mode 100644
index 4d9074be15695..0000000000000
--- a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.cluster.features;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.FailedNodeException;
-import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures;
-import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest;
-import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse;
-import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction;
-import org.elasticsearch.client.internal.ClusterAdminClient;
-import org.elasticsearch.cluster.ClusterChangedEvent;
-import org.elasticsearch.cluster.ClusterFeatures;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.ClusterStateListener;
-import org.elasticsearch.cluster.ClusterStateTaskExecutor;
-import org.elasticsearch.cluster.ClusterStateTaskListener;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
-import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.core.SuppressForbidden;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.core.UpdateForV9;
-import org.elasticsearch.logging.LogManager;
-import org.elasticsearch.logging.Logger;
-import org.elasticsearch.threadpool.Scheduler;
-import org.elasticsearch.threadpool.ThreadPool;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Executor;
-import java.util.stream.Collectors;
-
-@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9
-public class NodeFeaturesFixupListener implements ClusterStateListener {
-
-    private static final Logger logger = LogManager.getLogger(NodeFeaturesFixupListener.class);
-
-    private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30);
-
-    private final MasterServiceTaskQueue<NodesFeaturesTask> taskQueue;
-    private final ClusterAdminClient client;
-    private final Scheduler scheduler;
-    private final Executor executor;
-    private final Set<String> pendingNodes = Collections.synchronizedSet(new HashSet<>());
-
-    public NodeFeaturesFixupListener(ClusterService service, ClusterAdminClient client, ThreadPool threadPool) {
-        // there tends to be a lot of state operations on an upgrade - this one is not time-critical,
-        // so use LOW priority. It just needs to be run at some point after upgrade.
- this( - service.createTaskQueue("fix-node-features", Priority.LOW, new NodesFeaturesUpdater()), - client, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - NodeFeaturesFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodesFeaturesTask implements ClusterStateTaskListener { - private final Map> results; - private final int retryNum; - - NodesFeaturesTask(Map> results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply features for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map> results() { - return results; - } - } - - static class NodesFeaturesUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - var existingFeatures = builder.nodeFeatures(); - - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // double check there are still no features for the node - if (existingFeatures.getOrDefault(e.getKey(), Set.of()).isEmpty()) { - builder.putNodeFeatures(e.getKey(), e.getValue()); - modified = true; - } - } - c.success(() -> {}); - } - return modified ? builder.build() : context.initialState(); - } - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.nodesDelta().masterNodeChanged() && event.localNodeMaster()) { - /* - * Execute this if we have just become master. - * Check if there are any nodes that should have features in cluster state, but don't. - * This can happen if the master was upgraded from before 8.13, and one or more non-master nodes - * were already upgraded. They don't re-join the cluster with the new master, so never get their features - * (which the master now understands) added to cluster state. - * So we need to do a separate transport call to get the node features and add them to cluster state. - * We can't use features to determine when this should happen, as the features are incorrect. - * We also can't use transport version, as that is unreliable for upgrades - * from versions before 8.8 (see TransportVersionFixupListener). - * So the only thing we can use is release version. - * This is ok here, as Serverless will never hit this case, so the node feature fetch action will never be called on Serverless. - * This whole class will be removed in ES v9. 
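The scheduling half of the deleted listener (queryNodesFeatures and scheduleRetry, shown below) retries indefinitely while a synchronized pending set de-duplicates in-flight node queries. A simplified sketch of that pattern, with a ScheduledExecutorService standing in for the Elasticsearch scheduler and a placeholder for the NodesFeatures transport call (names hypothetical):

```java
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Simplified model of the fetch-with-retry loop: a synchronized pending set
// de-duplicates in-flight node queries, and failures re-schedule only the
// nodes that actually failed.
public class FeatureBackfillSketch {
    private static final long RETRY_SECONDS = 30;

    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final Set<String> pendingNodes = Collections.synchronizedSet(new HashSet<>());

    void queryNodes(Set<String> nodes, int retryNum) {
        Set<String> outstanding = new HashSet<>();
        synchronized (pendingNodes) {
            for (String node : nodes) {
                if (pendingNodes.add(node)) { // skip nodes with a request already in flight
                    outstanding.add(node);
                }
            }
        }
        if (outstanding.isEmpty()) {
            return; // every requested node is already being queried
        }
        try {
            fetchFeatures(outstanding); // placeholder for the NodesFeatures transport call
            pendingNodes.removeAll(outstanding);
        } catch (Exception e) {
            pendingNodes.removeAll(outstanding);
            scheduler.schedule(() -> queryNodes(outstanding, retryNum + 1), RETRY_SECONDS, TimeUnit.SECONDS);
        }
    }

    void fetchFeatures(Set<String> nodes) { /* placeholder for the real transport action */ }
}
```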
- */ - ClusterFeatures nodeFeatures = event.state().clusterFeatures(); - Set queryNodes = event.state() - .nodes() - .stream() - .filter(n -> n.getVersion().onOrAfter(Version.V_8_15_0)) - .map(DiscoveryNode::getId) - .filter(n -> getNodeFeatures(nodeFeatures, n).isEmpty()) - .collect(Collectors.toSet()); - - if (queryNodes.isEmpty() == false) { - logger.debug("Fetching actual node features for nodes {}", queryNodes); - queryNodesFeatures(queryNodes, 0); - } - } - } - - @SuppressForbidden(reason = "Need to access a specific node's features") - private static Set getNodeFeatures(ClusterFeatures features, String nodeId) { - return features.nodeFeatures().getOrDefault(nodeId, Set.of()); - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> queryNodesFeatures(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void queryNodesFeatures(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesFeaturesRequest request = new NodesFeaturesRequest(outstandingNodes.toArray(String[]::new)); - client.execute(TransportNodesFeaturesAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesFeaturesResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read features for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesFeaturesResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read features from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map> results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeFeatures::nodeFeatures)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("fix-node-features", new NodesFeaturesTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index fb8559b19d81d..de3343c1944c1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -37,7 +37,6 @@ import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.function.Predicate; import java.util.regex.Pattern; import static java.lang.String.format; @@ -48,7 +47,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparable { public static final NodeFeature RANGE_FLOAT_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.range_float_processors"); - public static final NodeFeature DOUBLE_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.double_processors"); public static 
final NodeFeature DESIRED_NODE_VERSION_DEPRECATED = new NodeFeature("desired_node.version_deprecated"); public static final TransportVersion RANGE_FLOAT_PROCESSORS_SUPPORT_TRANSPORT_VERSION = TransportVersions.V_8_3_0; @@ -348,10 +346,6 @@ public Set getRoles() { return roles; } - public boolean clusterHasRequiredFeatures(Predicate<NodeFeature> clusterHasFeature) { - return (processorsRange == null && processors.hasDecimals() == false) || clusterHasFeature.test(RANGE_FLOAT_PROCESSORS_SUPPORTED); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index bf80c38d64a4e..279243eeff7cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -48,17 +48,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; +/** + * This class's main focus is to resolve multi-syntax target expressions to resources or concrete indices. This resolution is influenced + * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: + * - Resolve expressions to concrete indices + * - Resolve expressions to data stream names + * - Resolve expressions to resources (meaning indices, data streams and aliases) + * Note: This class is performance sensitive, so we pay extra attention to the data structure usage and we avoid streams and iterators + * when possible in favor of the classic for-i loops.
+ */ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); @@ -190,7 +197,7 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) @@ -220,7 +227,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); @@ -236,7 +243,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit ); } } - checkSystemIndexAccess(context, Set.of(ia.getWriteIndex())); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, ia.getWriteIndex()); return ia; } else { throw new IllegalArgumentException( @@ -245,30 +252,110 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { - if (context.getOptions().expandWildcardExpressions() == false) { + /** + * Resolve the expression to the set of indices, aliases, and, optionally, data streams that the expression matches. + * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the + * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. + */ + protected static Collection resolveExpressionsToResources(Context context, String... expressions) { + // If we do not expand wildcards, then empty or _all expression result in an empty list + boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + if (expandWildcards == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); - } else { - return ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ); } } else { if (expressions == null || expressions.length == 0 || expressions.length == 1 && (Metadata.ALL.equals(expressions[0]) || Regex.isMatchAllPattern(expressions[0]))) { return WildcardExpressionResolver.resolveAll(context); + } else if (isNoneExpression(expressions)) { + return List.of(); + } + } + + // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. + Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + ? new LinkedHashSet<>() + : new ArrayList<>(expressions.length); + boolean wildcardSeen = false; + for (int i = 0, n = expressions.length; i < n; i++) { + String originalExpression = expressions[i]; + + // Resolve exclusion, a `-` prefixed expression is an exclusion only if it succeeds a wildcard. + boolean isExclusion = wildcardSeen && originalExpression.startsWith("-"); + String baseExpression = isExclusion ? 
originalExpression.substring(1) : originalExpression; + + // Resolve date math + baseExpression = DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime); + + // Validate base expression + validateResourceExpression(context, baseExpression, expressions); + + // Check if it's wildcard + boolean isWildcard = expandWildcards && WildcardExpressionResolver.isWildcard(originalExpression); + wildcardSeen |= isWildcard; + + if (isWildcard) { + Set matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + + if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { + throw notFoundException(baseExpression); + } + + if (isExclusion) { + resources.removeAll(matchingResources); + } else { + resources.addAll(matchingResources); + } } else { - return WildcardExpressionResolver.resolve( - context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ) - ); + if (isExclusion) { + resources.remove(baseExpression); + } else if (ensureAliasOrIndexExists(context, baseExpression)) { + resources.add(baseExpression); + } + } + } + return resources; + } + + /** + * Validates the requested expression by performing the following checks: + * - Ensure it's not empty + * - Ensure it doesn't start with `_` + * - Ensure it's not a remote expression unless the allow unavailable targets is enabled. + */ + private static void validateResourceExpression(Context context, String current, String[] expressions) { + if (Strings.isEmpty(current)) { + throw notFoundException(current); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. + if (current.charAt(0) == '_') { + throw new InvalidIndexNameException(current, "must not start with '_'."); + } + ensureRemoteExpressionRequireIgnoreUnavailable(context.getOptions(), current, expressions); + } + + /** + * Throws an exception if the expression is a remote expression and we do not allow unavailable targets + */ + private static void ensureRemoteExpressionRequireIgnoreUnavailable(IndicesOptions options, String current, String[] expressions) { + if (options.ignoreUnavailable()) { + return; + } + if (RemoteClusterAware.isRemoteIndexName(current)) { + List crossClusterIndices = new ArrayList<>(); + for (int i = 0; i < expressions.length; i++) { + if (RemoteClusterAware.isRemoteIndexName(expressions[i])) { + crossClusterIndices.add(expressions[i]); + } } + throw new IllegalArgumentException( + "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices + ); } } @@ -341,7 +428,7 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); @@ -395,7 +482,9 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { && context.getOptions().includeFailureIndices()) { // Collect the data streams involved Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); aliasDataStreams.add(indicesLookup.get(index.getName()).getParentDataStream()); } for (DataStream dataStream : aliasDataStreams) { @@ -416,13 +505,16 @@ Index[] concreteIndices(Context context, String... indexExpressions) { if (context.getOptions().allowNoIndices() == false && concreteIndicesResult.isEmpty()) { throw notFoundException(indexExpressions); } - checkSystemIndexAccess(context, concreteIndicesResult); - return concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + Index[] resultArray = concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, resultArray); + return resultArray; } private static void resolveIndicesForDataStream(Context context, DataStream dataStream, Set concreteIndicesResult) { if (shouldIncludeRegularIndices(context.getOptions())) { - for (Index index : dataStream.getIndices()) { + List indices = dataStream.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -431,7 +523,9 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions())) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + List failureIndices = dataStream.getFailureIndices().getIndices(); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -482,64 +576,6 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract return indexAbstraction.getIndices().size() > 1; } - private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == Predicates.always()) { - return; - } - doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); - } - - private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { - final Metadata metadata = context.getState().metadata(); - final List resolvedSystemIndices = new ArrayList<>(); - final List resolvedNetNewSystemIndices = new ArrayList<>(); - final Set resolvedSystemDataStreams = new HashSet<>(); - final SortedMap indicesLookup = metadata.getIndicesLookup(); - boolean matchedIndex = false; - for (Index concreteIndex : concreteIndices) { - IndexMetadata idxMetadata = metadata.index(concreteIndex); - String name = concreteIndex.getName(); - if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { - matchedIndex = true; - IndexAbstraction indexAbstraction = indicesLookup.get(name); - if (indexAbstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(name)) { - 
resolvedNetNewSystemIndices.add(name); - } else { - resolvedSystemIndices.add(name); - } - } - } - if (matchedIndex) { - handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); - } - } - - private void handleMatchedSystemIndices( - List resolvedSystemIndices, - Set resolvedSystemDataStreams, - List resolvedNetNewSystemIndices - ) { - if (resolvedSystemIndices.isEmpty() == false) { - Collections.sort(resolvedSystemIndices); - deprecationLogger.warn( - DeprecationCategory.API, - "open_system_index_access", - "this request accesses system indices: {}, but in a future major version, direct access to system " - + "indices will be prevented by default", - resolvedSystemIndices - ); - } - if (resolvedSystemDataStreams.isEmpty() == false) { - throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); - } - if (resolvedNetNewSystemIndices.isEmpty() == false) { - throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); - } - } - private static IndexNotFoundException notFoundException(String... indexExpressions) { final IndexNotFoundException infe; if (indexExpressions == null @@ -568,16 +604,16 @@ private static IndexNotFoundException notFoundException(String... indexExpressio } private static boolean shouldTrackConcreteIndex(Context context, Index index) { - if (context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY - && context.netNewSystemIndexPredicate.test(index.getName())) { + if (SystemResourceAccess.isNetNewInBackwardCompatibleMode(context, index)) { // Exclude this one as it's a net-new system index, and we explicitly don't want those. return false; } + IndicesOptions options = context.getOptions(); if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) { DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { if (parentDataStream.isFailureStoreIndex(index.getName())) { - if (context.options.ignoreUnavailable()) { + if (options.ignoreUnavailable()) { return false; } else { throw new FailureIndexNotSupportedException(index); @@ -587,7 +623,6 @@ private static boolean shouldTrackConcreteIndex(Context context, Index index) { } final IndexMetadata imd = context.state.metadata().index(index); if (imd.getState() == IndexMetadata.State.CLOSE) { - IndicesOptions options = context.options; if (options.forbidClosedIndices() && options.ignoreUnavailable() == false) { throw new IndexClosedException(index); } else { @@ -721,21 +756,6 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) return state.metadata().hasIndexAbstraction(resolvedAliasOrIndex); } - /** - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression) { - return DateMathExpressionResolver.resolveExpression(dateExpression); - } - - /** - * @param time instant to consider when parsing the expression - * @return If the specified string is data math expression then this method returns the resolved expression. 
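For context on the rewritten resolveExpressionsToResources earlier in this file's diff: the old pipeline of DateMathExpressionResolver.resolve, ExplicitResourceNameFilter.filterUnavailable and WildcardExpressionResolver.resolve collapses into one left-to-right pass, in which a leading "-" only acts as an exclusion once a wildcard has been seen earlier in the list. A heavily simplified model of those semantics, with a toy prefix matcher instead of Regex.simpleMatch and a plain set standing in for the indices lookup (all names hypothetical):

```java
import java.util.LinkedHashSet;
import java.util.Set;

// Toy model of the single-pass resolution: "-name" is an exclusion only after
// a wildcard was seen, and "*,-*" therefore resolves to nothing at all.
public class ExclusionSketch {
    static Set<String> resolve(Set<String> knownResources, String... expressions) {
        Set<String> resources = new LinkedHashSet<>();
        boolean wildcardSeen = false;
        for (String expression : expressions) {
            boolean isExclusion = wildcardSeen && expression.startsWith("-");
            String base = isExclusion ? expression.substring(1) : expression;
            boolean isWildcard = base.contains("*");
            wildcardSeen |= isWildcard;
            if (isWildcard) {
                // (the real loop also throws when a wildcard matches nothing and allowNoIndices is false)
                for (String name : knownResources) {
                    if (matches(base, name)) {
                        if (isExclusion) {
                            resources.remove(name);
                        } else {
                            resources.add(name);
                        }
                    }
                }
            } else if (isExclusion) {
                resources.remove(base);
            } else if (knownResources.contains(base)) { // stands in for ensureAliasOrIndexExists
                resources.add(base);
            }
        }
        return resources;
    }

    // toy prefix matcher instead of Regex.simpleMatch
    private static boolean matches(String pattern, String name) {
        return pattern.equals("*") || (pattern.endsWith("*") && name.startsWith(pattern.substring(0, pattern.length() - 1)));
    }

    public static void main(String[] args) {
        Set<String> known = Set.of("logs-1", "logs-2", "metrics-1");
        System.out.println(resolve(known, "logs-*", "-logs-2")); // [logs-1]
        System.out.println(resolve(known, "*", "-*"));           // []
    }
}
```

Note that the real code also short-circuits the pair *,-* via isNoneExpression before the loop even runs, although the loop would produce the same empty result.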
- */ - public static String resolveDateMathExpression(String dateExpression, long time) { - return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); - } - /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ @@ -765,7 +785,8 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); + // unmodifiable without creating a new collection as it might contain many items + Collection resolved = resolveExpressionsToResources(context, expressions); if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items return Collections.unmodifiableSet((Set) resolved); @@ -779,7 +800,7 @@ public Set resolveExpressions( * given index. *
Only aliases with filters are returned. If the indices list contains a non-filtering reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. + * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); @@ -799,7 +820,8 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * Iterates through the list of indices and selects the effective list of required aliases for the given index. *
Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - *
NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. + *
NOTE: the provided expressions must have been resolved already via + * {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] indexAliases( ClusterState state, @@ -878,7 +900,8 @@ public String[] indexAliases( .toArray(AliasMetadata[]::new); } List aliases = null; - for (AliasMetadata aliasMetadata : aliasCandidates) { + for (int i = 0; i < aliasCandidates.length; i++) { + AliasMetadata aliasMetadata = aliasCandidates[i]; if (requiredAlias.test(aliasMetadata)) { // If required - add it to the list of aliases if (aliases == null) { @@ -914,7 +937,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); // TODO: it appears that this can never be true? if (isAllIndices(resolvedExpressions)) { @@ -932,7 +955,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab for (String expression : resolvedExpressions) { IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { - for (Index index : indexAbstraction.getIndices()) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); String concreteIndex = index.getName(); if (norouting.contains(concreteIndex) == false) { AliasMetadata aliasMetadata = state.metadata().index(concreteIndex).getAliases().get(indexAbstraction.getName()); @@ -961,7 +985,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab continue; } if (dataStream.getIndices() != null) { - for (Index index : dataStream.getIndices()) { + for (int i = 0, n = dataStream.getIndices().size(); i < n; i++) { + Index index = dataStream.getIndices().get(i); String concreteIndex = index.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1006,8 +1031,8 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m Set r = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); Map> routings = new HashMap<>(); String[] concreteIndices = metadata.getConcreteAllIndices(); - for (String index : concreteIndices) { - routings.put(index, r); + for (int i = 0; i < concreteIndices.length; i++) { + routings.put(concreteIndices[i], r); } return routings; } @@ -1036,6 +1061,16 @@ static boolean isExplicitAllPattern(Collection aliasesOrIndices) { return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next()); } + /** + * Identifies if this expression list is *,-* which effectively means a request that requests no indices. + */ + static boolean isNoneExpression(String[] expressions) { + return expressions.length == 2 && "*".equals(expressions[0]) && "-*".equals(expressions[1]); + } + + /** + * @return the system access level that will be applied in this resolution. See {@link SystemIndexAccessLevel} for details. 
+ */ public SystemIndexAccessLevel getSystemIndexAccessLevel() { final SystemIndexAccessLevel accessLevel = SystemIndices.getSystemIndexAccessLevel(threadContext); assert accessLevel != SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY @@ -1043,6 +1078,14 @@ public SystemIndexAccessLevel getSystemIndexAccessLevel() { return accessLevel; } + /** + * Determines the right predicate based on the {@link IndexNameExpressionResolver#getSystemIndexAccessLevel()}. Specifically: + * - NONE implies no access to net-new system indices and data streams + * - BACKWARDS_COMPATIBLE_ONLY allows access also to net-new system resources + * - ALL allows access to everything + * - otherwise we fall back to {@link SystemIndices#getProductSystemIndexNamePredicate(ThreadContext)} + * @return the predicate that defines the access to system indices. + */ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; @@ -1067,6 +1110,43 @@ public Predicate getNetNewSystemIndexPredicate() { return systemIndices::isNetNewSystemIndex; } + /** + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * exception. + */ + @Nullable + private static boolean ensureAliasOrIndexExists(Context context, String name) { + boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); + IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); + if (indexAbstraction == null) { + if (ignoreUnavailable) { + return false; + } else { + throw notFoundException(name); + } + } + // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) + if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { + if (ignoreUnavailable) { + return false; + } else { + throw aliasesNotSupportedException(name); + } + } + if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { + if (ignoreUnavailable) { + return false; + } else { + IndexNotFoundException infe = notFoundException(name); + // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. + infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); + throw infe; + } + } + return true; + } + public static class Context { private final ClusterState state; @@ -1242,7 +1322,7 @@ public Predicate getSystemIndexAccessPredicate() { } /** - * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases + * Resolves name expressions with wildcards into the corresponding concrete indices/aliases/data streams */ static final class WildcardExpressionResolver { @@ -1251,8 +1331,8 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. - * Depending on the context, returns the names of the datastreams themselves or their backing indices. + * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. + * Depending on the context, returns the names of the data streams themselves or their backing indices. 
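The new javadoc on getSystemIndexAccessPredicate above spells out a four-way mapping from access level to predicate. A compact sketch of that mapping (the enum constants mirror SystemIndexAccessLevel; the two predicate parameters stand in for getNetNewSystemIndexPredicate() and the product-specific predicate from SystemIndices, so treat the exact wiring as an assumption):

```java
import java.util.function.Predicate;

// Sketch of the level-to-predicate mapping described in the javadoc; the two
// parameters stand in for the net-new and product-specific predicates.
public class SystemAccessSketch {
    enum Level { NONE, BACKWARDS_COMPATIBLE_ONLY, ALL, RESTRICTED }

    static Predicate<String> accessPredicate(Level level, Predicate<String> netNew, Predicate<String> product) {
        return switch (level) {
            case NONE -> name -> false;               // no system resources at all
            case BACKWARDS_COMPATIBLE_ONLY -> netNew; // net-new system resources only
            case ALL -> name -> true;                 // everything is accessible
            case RESTRICTED -> product;               // product-specific allow-list
        };
    }
}
```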
*/ public static Collection resolveAll(Context context) { List concreteIndices = resolveEmptyOrTrivialWildcard(context); @@ -1261,16 +1341,17 @@ public static Collection resolveAll(Context context) { return concreteIndices; } - Stream ias = context.getState() + Set resolved = new HashSet<>(concreteIndices.size()); + context.getState() .metadata() .getIndicesLookup() .values() .stream() .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) - .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())) + .forEach(ia -> resolved.addAll(expandToOpenClosed(context, ia))); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1283,73 +1364,6 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } - /** - * Returns all the existing resource (index, alias and datastream) names that the {@param expressions} list resolves to. - * The passed-in {@param expressions} can contain wildcards and exclusions, as well as plain resource names. - *
- * The return is a {@code Collection} (usually a {@code Set} but can also be a {@code List}, for performance reasons) of plain - * resource names only. All the returned resources are "accessible", in the given context, i.e. the resources exist - * and are not an alias or a datastream if the context does not permit it. - * Wildcard expressions, depending on the context: - *   1. might throw an exception if they don't resolve to anything - *   2. might not resolve to hidden or system resources (but plain names can refer to hidden or system resources) - *   3. might resolve to aliases and datastreams, and it could be (depending on the context) that their backing indices are what's - * ultimately returned, instead of the alias or datastream name
- */ - public static Collection resolve(Context context, List expressions) { - // fast exit if there are no wildcards to evaluate - if (context.getOptions().expandWildcardExpressions() == false) { - return expressions; - } - int firstWildcardIndex = 0; - for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { - String expression = expressions.get(firstWildcardIndex); - if (isWildcard(expression)) { - break; - } - } - if (firstWildcardIndex == expressions.size()) { - return expressions; - } - Set result = new HashSet<>(); - for (int i = 0; i < firstWildcardIndex; i++) { - result.add(expressions.get(i)); - } - AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); - for (int i = firstWildcardIndex; i < expressions.size(); i++) { - String expression = expressions.get(i); - boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; - if (i == firstWildcardIndex || isWildcard(expression)) { - Stream matchingResources = matchResourcesToWildcard( - context, - isExclusion ? expression.substring(1) : expression - ); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - if (emptyWildcardExpansion != null) { - emptyWildcardExpansion.set(true); - matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); - } - if (isExclusion) { - matchingOpenClosedNames.forEach(result::remove); - } else { - matchingOpenClosedNames.forEach(result::add); - } - if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { - throw notFoundException(expression); - } - } else { - if (isExclusion) { - result.remove(expression.substring(1)); - } else { - result.add(expression); - } - } - } - return result; - } - private static IndexMetadata.State excludeState(IndicesOptions options) { final IndexMetadata.State excludeState; if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { @@ -1366,55 +1380,82 @@ private static IndexMetadata.State excludeState(IndicesOptions options) { } /** - * Given a single wildcard {@param expression}, return the {@code Stream} that contains all the resources (i.e. indices, aliases, - * and datastreams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's + * Given a single wildcard {@param expression}, return a {@code Set} that contains all the resources (i.e. indices, aliases, + * and data streams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's * name matches the {@param expression} wildcard). * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions - * on whether to consider alias, datastream, system, and hidden resources. - * It does NOT consider the open or closed status of index resources. + * on whether to consider alias, data stream, system, and hidden resources. 
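The new matchWildcardToResources keeps the pre-existing suffix-wildcard optimisation: for an expression like test*, only a contiguous slice of the sorted indices lookup can possibly match. A self-contained sketch of that subMap trick (names hypothetical; match-all is handled separately, as in the code above):

```java
import java.util.NavigableMap;
import java.util.TreeMap;

// Sketch of the suffix-wildcard pre-filtering: only keys in the range
// [prefix, prefix-with-last-char-bumped) can match "prefix*", so the whole
// sorted lookup never has to be scanned.
public class SuffixWildcardSketch {
    static NavigableMap<String, String> filterForSuffixWildcard(NavigableMap<String, String> lookup, String expression) {
        assert expression.length() > 1 && expression.indexOf('*') == expression.length() - 1;
        String fromPrefix = expression.substring(0, expression.length() - 1);
        char[] toPrefixChars = fromPrefix.toCharArray();
        toPrefixChars[toPrefixChars.length - 1]++; // next possible prefix, e.g. "logs-" -> "logs."
        String toPrefix = new String(toPrefixChars);
        return lookup.subMap(fromPrefix, true, toPrefix, false);
    }

    public static void main(String[] args) {
        NavigableMap<String, String> lookup = new TreeMap<>();
        lookup.put("logs-2024", "index");
        lookup.put("logs-2025", "index");
        lookup.put("metrics-2025", "index");
        System.out.println(filterForSuffixWildcard(lookup, "logs-*").keySet()); // [logs-2024, logs-2025]
    }
}
```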
*/ - private static Stream matchResourcesToWildcard(Context context, String wildcardExpression) { + static Set matchWildcardToResources(Context context, String wildcardExpression) { assert isWildcard(wildcardExpression); final SortedMap indicesLookup = context.getState().getMetadata().getIndicesLookup(); - Stream matchesStream; + Set matchedResources = new HashSet<>(); + // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" if (Regex.isSuffixMatchPattern(wildcardExpression)) { - // this is an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" - matchesStream = filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values().stream(); - } else { - matchesStream = indicesLookup.values().stream(); - if (Regex.isMatchAllPattern(wildcardExpression) == false) { - matchesStream = matchesStream.filter( - indexAbstraction -> Regex.simpleMatch(wildcardExpression, indexAbstraction.getName()) - ); + for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); + } + return matchedResources; + } + // In case of match all it fetches all index abstractions + if (Regex.isMatchAllPattern(wildcardExpression)) { + for (IndexAbstraction ia : indicesLookup.values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); } + return matchedResources; } - if (context.getOptions().ignoreAliases()) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.getType() != Type.ALIAS); + for (IndexAbstraction indexAbstraction : indicesLookup.values()) { + if (Regex.simpleMatch(wildcardExpression, indexAbstraction.getName())) { + maybeAddToResult(context, wildcardExpression, indexAbstraction, matchedResources); + } } - if (context.includeDataStreams() == false) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isDataStreamRelated() == false); + return matchedResources; + } + + private static void maybeAddToResult( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction, + Set matchedResources + ) { + if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) { + matchedResources.addAll(expandToOpenClosed(context, indexAbstraction)); } - // historic, i.e. not net-new, system indices are included irrespective of the system access predicate - // the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || (indexAbstraction.getType() != Type.DATA_STREAM - && indexAbstraction.getParentDataStream() == null - && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false) - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); + } + + /** + * Checks if this index abstraction should be included because it matched the wildcard expression. 
+ * @param context the options of this request that influence the decision if this index abstraction should be included in the result + * @param wildcardExpression the wildcard expression that matched this index abstraction + * @param indexAbstraction the index abstraction in question + * @return true, if the index abstraction should be included in the result + */ + private static boolean shouldExpandToIndexAbstraction( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction + ) { + if (context.getOptions().ignoreAliases() && indexAbstraction.getType() == Type.ALIAS) { + return false; + } + if (context.includeDataStreams() == false && indexAbstraction.isDataStreamRelated()) { + return false; + } + + if (indexAbstraction.isSystem() + && SystemResourceAccess.shouldExpandToSystemIndexAbstraction(context, indexAbstraction) == false) { + return false; + } + if (context.getOptions().expandWildcardsHidden() == false) { - if (wildcardExpression.startsWith(".")) { - // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also - // starts with "." - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isHidden() == false || indexAbstraction.getName().startsWith(".") - ); - } else { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isHidden() == false); + // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also + // starts with "." + if (indexAbstraction.isHidden() + && (wildcardExpression.startsWith(".") && indexAbstraction.getName().startsWith(".")) == false) { + return false; } } - return matchesStream; + return true; } private static Map filterIndicesLookupForSuffixWildcard( @@ -1430,35 +1471,39 @@ private static Map filterIndicesLookupForSuffixWildcar } /** - * Return the {@code Stream} of open and/or closed index names for the given {@param resources}. + * Return the {@code Set} of open and/or closed index names for the given {@param resources}. * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. 
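The reworked expandToOpenClosed below trades the old stream pipeline for explicit loops while keeping the same open/closed filtering rule, which is driven by a single "state to exclude" derived from the wildcard options. A minimal sketch of that rule with stand-in types (hypothetical names):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the open/closed rule: when both states are requested nothing is
// excluded; otherwise the opposite state is dropped. Types are stand-ins.
public class OpenClosedSketch {
    enum State { OPEN, CLOSE }

    record IndexMeta(String name, State state) {}

    static State excludeState(boolean expandOpen, boolean expandClosed) {
        if (expandOpen && expandClosed) {
            return null; // keep everything
        }
        if (expandOpen) {
            return State.CLOSE; // drop closed indices
        }
        if (expandClosed) {
            return State.OPEN; // drop open indices
        }
        return null; // neither flag set: callers never expand in this case
    }

    static List<String> expand(List<IndexMeta> backingIndices, boolean expandOpen, boolean expandClosed) {
        State exclude = excludeState(expandOpen, expandClosed);
        List<String> names = new ArrayList<>(backingIndices.size());
        for (int i = 0, n = backingIndices.size(); i < n; i++) { // classic for-i loop, as the class javadoc prefers
            IndexMeta meta = backingIndices.get(i);
            if (meta.state() != exclude) {
                names.add(meta.name());
            }
        }
        return names;
    }
}
```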
*/ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Set expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); - return resources.flatMap(indexAbstraction -> { - if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); - } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); - } else { - Stream indicesStateStream = Stream.of(); - if (shouldIncludeRegularIndices(context.getOptions())) { - indicesStateStream = indexAbstraction.getIndices().stream().map(context.state.metadata()::index); - } - if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { - DataStream dataStream = (DataStream) indexAbstraction; - indicesStateStream = Stream.concat( - indicesStateStream, - dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) - ); + Set resources = new HashSet<>(); + if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { + resources.add(indexAbstraction.getName()); + } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { + resources.add(indexAbstraction.getName()); + } else { + if (shouldIncludeRegularIndices(context.getOptions())) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - if (excludeState != null) { - indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); + } + if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { + DataStream dataStream = (DataStream) indexAbstraction; + for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { + Index index = dataStream.getFailureIndices().getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } - }); + } + return resources; } private static List resolveEmptyOrTrivialWildcard(Context context) { @@ -1471,26 +1516,26 @@ private static List resolveEmptyOrTrivialWildcard(Context context) { } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { - return Arrays.stream(allIndices).filter(name -> { - if (name.startsWith(".")) { - IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); - assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; - if (abstraction.isSystem()) { - if (context.netNewSystemIndexPredicate.test(name)) { - if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { - return false; - } else { - return context.systemIndexAccessPredicate.test(name); - } - } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { - return context.systemIndexAccessPredicate.test(name); - } - } else { - return true; - } + List filteredIndices = new 
ArrayList<>(allIndices.length); + for (int i = 0; i < allIndices.length; i++) { + if (shouldIncludeIndexAbstraction(context, allIndices[i])) { + filteredIndices.add(allIndices[i]); } + } + return filteredIndices; + } + + private static boolean shouldIncludeIndexAbstraction(Context context, String name) { + if (name.startsWith(".") == false) { return true; - }).toList(); + } + + IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); + assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; + if (abstraction.isSystem() == false) { + return true; + } + return SystemResourceAccess.isSystemIndexAbstractionAccessible(context, abstraction); } private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions options, Metadata metadata) { @@ -1513,8 +1558,39 @@ private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions return Strings.EMPTY_ARRAY; } } + + static boolean isWildcard(String expression) { + return Regex.isSimpleMatchPattern(expression); + } + + static boolean hasWildcards(String[] expressions) { + for (int i = 0; i < expressions.length; i++) { + if (isWildcard(expressions[i])) { + return true; + } + } + return false; + } + } + + /** + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression) { + return DateMathExpressionResolver.resolveExpression(dateExpression); + } + + /** + * @param time instant to consider when parsing the expression + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression, long time) { + return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); } + /** + * Resolves a date math expression based on the requested time. + */ public static final class DateMathExpressionResolver { private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); @@ -1530,35 +1606,18 @@ private DateMathExpressionResolver() { } /** - * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying. - * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects. + * Resolves a date math expression using the current time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. */ - public static List resolve(Context context, List expressions) { - boolean wildcardSeen = false; - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - String[] result = null; - for (int i = 0, n = expressions.size(); i < n; i++) { - String expression = expressions.get(i); - // accepts date-math exclusions that are of the form "-<...{}>",f i.e. the "-" is outside the "<>" date-math template - boolean isExclusion = wildcardSeen && expression.startsWith("-"); - wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression)); - String toResolve = isExclusion ? expression.substring(1) : expression; - String resolved = resolveExpression(toResolve, context::getStartTime); - if (toResolve != resolved) { - if (result == null) { - result = expressions.toArray(Strings.EMPTY_ARRAY); - } - result[i] = isExclusion ? "-" + resolved : resolved; - } - } - return result == null ? 
expressions : Arrays.asList(result); - } - - static String resolveExpression(String expression) { + public static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(String expression, LongSupplier getTime) { + /** + * Resolves a date math expression using the provided time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. + */ + public static String resolveExpression(String expression, LongSupplier getTime) { if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) { return expression; } @@ -1707,135 +1766,133 @@ private static String doResolveExpression(String expression, LongSupplier getTim } } - public static final class ExplicitResourceNameFilter { + /** + * In this class we collect the system access relevant code. The helper methods provide the following functionalities: + * - determining the access to a system index abstraction + * - verifying the access to system abstractions and adding the necessary warnings + * - determining the access to a system index based on its name + * WARNING: we have observed differences in how the access is determined. For now this behaviour is documented and preserved. + */ + public static final class SystemResourceAccess { - private ExplicitResourceNameFilter() { + private SystemResourceAccess() { // Utility class } /** - * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. - * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. + * Checks if this system index abstraction should be included when resolving via {@link + * IndexNameExpressionResolver.WildcardExpressionResolver#resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context, String[])}. + * NOTE: it behaves differently than {@link SystemResourceAccess#shouldExpandToSystemIndexAbstraction(Context, IndexAbstraction)} + * because in the case that the access level is BACKWARDS_COMPATIBLE_ONLY it does not include the net-new indices, this is + * questionable. */ - public static List filterUnavailable(Context context, List expressions) { - ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - boolean wildcardSeen = false; - List result = null; - for (int i = 0; i < expressions.size(); i++) { - String expression = expressions.get(i); - if (Strings.isEmpty(expression)) { - throw notFoundException(expression); - } - // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API - // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, - // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown - // if the expression can't be found. 
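As the new javadoc says, DateMathExpressionResolver only treats an expression as date math when it is wrapped in angle brackets (written %3C and %3E in the javadoc to keep it HTML-safe); anything else passes through unchanged. A toy illustration of that contract, resolving only the simplest {now/d} form, whereas the real resolver supports full date-math arithmetic, rounding, and custom format patterns:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

// Hypothetical illustration of the date-math contract: an expression is only
// treated as date math when wrapped in '<' and '>'; everything else is
// returned intact.
public class DateMathSketch {
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("uuuu.MM.dd").withZone(ZoneOffset.UTC);

    static String resolve(String expression, long nowMillis) {
        if (expression.startsWith("<") == false || expression.endsWith(">") == false) {
            return expression; // not date math, return intact
        }
        // Only the simplest form, <prefix-{now/d}>, is sketched here.
        String inner = expression.substring(1, expression.length() - 1);
        return inner.replace("{now/d}", FORMATTER.format(Instant.ofEpochMilli(nowMillis)));
    }

    public static void main(String[] args) {
        System.out.println(resolve("<logs-{now/d}>", 0L)); // logs-1970.01.01
        System.out.println(resolve("plain-index", 0L));    // plain-index
    }
}
```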
- if (expression.charAt(0) == '_') { - throw new InvalidIndexNameException(expression, "must not start with '_'."); - } - final boolean isWildcard = expandWildcards && isWildcard(expression); - if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) { - if (result != null) { - result.add(expression); - } + public static boolean isSystemIndexAbstractionAccessible(Context context, IndexAbstraction abstraction) { + assert abstraction.isSystem() : "We should only check this for system resources"; + if (context.netNewSystemIndexPredicate.test(abstraction.getName())) { + if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { + return false; } else { - if (result == null) { - result = new ArrayList<>(expressions.size() - 1); - result.addAll(expressions.subList(0, i)); - } + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - wildcardSeen |= isWildcard; + } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - return result == null ? expressions : result; + return true; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of - * exception. + * Historic, i.e. not net-new, system indices are included irrespective of the system access predicate + * the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature. + * A historic system resource, can only be an index since system data streams were added later. */ - @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { - boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); - IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); - if (indexAbstraction == null) { - if (ignoreUnavailable) { - return false; - } else { - throw notFoundException(name); - } - } - // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) - if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { - if (ignoreUnavailable) { - return false; - } else { - throw aliasesNotSupportedException(name); - } - } - if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { - if (ignoreUnavailable) { - return false; - } else { - IndexNotFoundException infe = notFoundException(name); - // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. 
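The ensureAliasOrIndexExists being deleted here is the same logic that reappears, essentially unchanged, on the outer class earlier in this file's diff: each of its three failure modes either reports "skip this name" when ignore_unavailable is set or throws. Condensed into a sketch with stand-in types (hypothetical names; RuntimeException replaces IndexNotFoundException and the alias/data-stream specific exceptions):

```java
// Condensed model of the three-way existence check; RuntimeException stands in
// for IndexNotFoundException and the alias/data-stream specific exceptions.
public class ExistenceCheckSketch {
    record Abstraction(boolean isAlias, boolean isDataStreamRelated) {}

    static boolean exists(Abstraction abstraction, boolean ignoreUnavailable, boolean ignoreAliases, boolean includeDataStreams) {
        if (abstraction == null) {
            if (ignoreUnavailable) {
                return false; // silently skip the missing name
            }
            throw new RuntimeException("no such index");
        }
        if (abstraction.isAlias() && ignoreAliases) {
            if (ignoreUnavailable) {
                return false;
            }
            throw new RuntimeException("aliases are not supported in this context");
        }
        if (abstraction.isDataStreamRelated() && includeDataStreams == false) {
            if (ignoreUnavailable) {
                return false;
            }
            throw new RuntimeException("data streams are excluded in this context");
        }
        return true;
    }
}
```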
- infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); - throw infe; - } - } - return true; + private static boolean shouldExpandToSystemIndexAbstraction(Context context, IndexAbstraction indexAbstraction) { + assert indexAbstraction.isSystem() : "We should only check this for system resources"; + boolean isHistoric = indexAbstraction.getType() != Type.DATA_STREAM + && indexAbstraction.getParentDataStream() == null + && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false; + return isHistoric || context.systemIndexAccessPredicate.test(indexAbstraction.getName()); } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { - if (options.ignoreUnavailable()) { + /** + * Checks if any system indices that should not have been accessible according to the + * {@link Context#getSystemIndexAccessPredicate()} are accessed, and it performs the following actions: + * - if there are historic (aka not net-new) system indices, then it adds a deprecation warning + * - if it contains net-new system indices or system data streams, it throws an exception. + */ + private static void checkSystemIndexAccess(Context context, ThreadContext threadContext, Index... concreteIndices) { + final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); + if (systemIndexAccessPredicate == Predicates.always()) { return; } - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); - } - } + doCheckSystemIndexAccess(context, systemIndexAccessPredicate, threadContext, concreteIndices); } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { - List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - crossClusterIndices.add(index); + private static void doCheckSystemIndexAccess( + Context context, + Predicate systemIndexAccessPredicate, + ThreadContext threadContext, + Index... concreteIndices + ) { + final Metadata metadata = context.getState().metadata(); + final List resolvedSystemIndices = new ArrayList<>(); + final List resolvedNetNewSystemIndices = new ArrayList<>(); + final Set resolvedSystemDataStreams = new HashSet<>(); + final SortedMap indicesLookup = metadata.getIndicesLookup(); + boolean matchedIndex = false; + for (int i = 0; i < concreteIndices.length; i++) { + Index concreteIndex = concreteIndices[i]; + IndexMetadata idxMetadata = metadata.index(concreteIndex); + String name = concreteIndex.getName(); + if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { + matchedIndex = true; + IndexAbstraction indexAbstraction = indicesLookup.get(name); + if (indexAbstraction.getParentDataStream() != null) { + resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); + } else if (context.netNewSystemIndexPredicate.test(name)) { + resolvedNetNewSystemIndices.add(name); + } else { + resolvedSystemIndices.add(name); + } } } - throw new IllegalArgumentException( - "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices - ); - } - } - - /** - * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState} - * since it uses only the start time to resolve expressions. 
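The checkSystemIndexAccess helper described above classifies offending system resources into three buckets with different outcomes: historic system indices get a deprecation warning, while net-new system indices and system data streams fail the request. A sketch of that classification with stand-in types (hypothetical names):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the classification in checkSystemIndexAccess: historic system
// indices only warn, net-new system indices and system data streams throw.
public class SystemIndexCheckSketch {
    record SystemIndex(String name, boolean netNew, boolean inDataStream) {}

    static void check(List<SystemIndex> matched) {
        List<String> historic = new ArrayList<>();
        List<String> netNew = new ArrayList<>();
        List<String> dataStreams = new ArrayList<>();
        for (SystemIndex index : matched) {
            if (index.inDataStream()) {
                dataStreams.add(index.name());
            } else if (index.netNew()) {
                netNew.add(index.name());
            } else {
                historic.add(index.name());
            }
        }
        if (historic.isEmpty() == false) {
            System.err.println("deprecation: this request accesses system indices " + historic);
        }
        if (dataStreams.isEmpty() == false || netNew.isEmpty() == false) {
            throw new IllegalStateException("access denied to system resources " + dataStreams + " " + netNew);
        }
    }
}
```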
- */ - public static final class ResolverContext extends Context { - public ResolverContext() { - this(System.currentTimeMillis()); - } - - public ResolverContext(long startTime) { - super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never()); + if (matchedIndex) { + handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices, threadContext); + } } - @Override - public ClusterState getState() { - throw new UnsupportedOperationException("should never be called"); + private static void handleMatchedSystemIndices( + List<String> resolvedSystemIndices, + Set<String> resolvedSystemDataStreams, + List<String> resolvedNetNewSystemIndices, + ThreadContext threadContext + ) { + if (resolvedSystemIndices.isEmpty() == false) { + Collections.sort(resolvedSystemIndices); + deprecationLogger.warn( + DeprecationCategory.API, + "open_system_index_access", + "this request accesses system indices: {}, but in a future major version, direct access to system " + + "indices will be prevented by default", + resolvedSystemIndices + ); + } + if (resolvedSystemDataStreams.isEmpty() == false) { + throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); + } + if (resolvedNetNewSystemIndices.isEmpty() == false) { + throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); + } } - @Override - public IndicesOptions getOptions() { - throw new UnsupportedOperationException("should never be called"); + /** + * Used in {@link IndexNameExpressionResolver#shouldTrackConcreteIndex(Context, Index)} to exclude net-new indices + * when we are in the backwards-compatible-only access level. + * This also feels questionable. + */ + private static boolean isNetNewInBackwardCompatibleMode(Context context, Index index) { + return context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY + && context.netNewSystemIndexPredicate.test(index.getName()); } } - private static boolean isWildcard(String expression) { - return Regex.isSimpleMatchPattern(expression); - } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java index 89f776a7ada0f..49bd38330e3af 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java @@ -9,24 +9,12 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; public class MetadataFeatures implements FeatureSpecification { - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of( - DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED, - Version.V_8_3_0, - DesiredNode.DOUBLE_PROCESSORS_SUPPORTED, - Version.V_8_5_0 - ); - } - @Override public Set<NodeFeature> getFeatures() { return Set.of(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index d6ed28454df96..3878a3329b634 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -782,7 +782,7 @@ private void validateUseOfDeprecatedIngestPipelines(String name, IngestMetadata private void emitWarningIfPipelineIsDeprecated(String name, Map pipelines, String pipelineName) { Optional.ofNullable(pipelineName) .map(pipelines::get) - .filter(p -> Boolean.TRUE.equals(p.getConfigAsMap().get("deprecated"))) + .filter(p -> Boolean.TRUE.equals(p.getConfig().get("deprecated"))) .ifPresent( p -> deprecationLogger.warn( DeprecationCategory.TEMPLATES, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java index 9b3abf38c519b..0b9c359006b23 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java @@ -46,7 +46,11 @@ public class RepositoryMetadata implements Writeable { * @param settings repository settings */ public RepositoryMetadata(String name, String type, Settings settings) { - this(name, RepositoryData.MISSING_UUID, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); + this(name, RepositoryData.MISSING_UUID, type, settings); + } + + public RepositoryMetadata(String name, String uuid, String type, Settings settings) { + this(name, uuid, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); } public RepositoryMetadata(RepositoryMetadata metadata, long generation, long pendingGeneration) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index aa92f395b20d2..be0e3429a2ce4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -167,8 +167,7 @@ public void process(IndexRequest indexRequest) { // generate id if not already provided final String id = indexRequest.id(); if (id == null) { - if (creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) - || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID) && indexMode == IndexMode.LOGSDB) { + if (shouldUseTimeBasedId(indexMode, creationVersion)) { indexRequest.autoGenerateTimeBasedId(); } else { indexRequest.autoGenerateId(); @@ -178,6 +177,15 @@ public void process(IndexRequest indexRequest) { } } + private static boolean shouldUseTimeBasedId(final IndexMode indexMode, final IndexVersion creationVersion) { + return indexMode == IndexMode.LOGSDB && isNewIndexVersion(creationVersion); + } + + private static boolean isNewIndexVersion(final IndexVersion creationVersion) { + return creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID); + } + @Override public int indexShard( String id, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index f7812d284f2af..13fc874f52e9f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,6 +32,7 @@ import java.util.Set; import 
java.util.stream.Collectors; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -305,8 +306,14 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { + // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - return shardRouting.isPromotableToPrimary(); + // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. + if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)) { + return shardRouting.isSearchable(); + } else { + return shardRouting.isPromotableToPrimary(); + } } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 898e5da1e2da2..6c1f839c55712 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -139,6 +139,10 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { recordTime( cumulativeComputationTime, + // We set currentDesiredBalance back to INITIAL when the node stands down as master in onNoLongerMaster. + // However, it is possible that we revert the effect here by setting it again since the computation is async + // and does not check whether the node is master. This should have little to no practical impact. But it may + // lead to unexpected behaviours for tests. See also https://github.com/elastic/elasticsearch/pull/116904 () -> setCurrentDesiredBalance( desiredBalanceComputer.compute( initialDesiredBalance, @@ -236,6 +240,10 @@ public void allocate(RoutingAllocation allocation, ActionListener listener currentDesiredBalanceRef.compareAndSet(DesiredBalance.NOT_MASTER, DesiredBalance.INITIAL); desiredBalanceComputation.onNewInput(DesiredBalanceInput.create(index, allocation)); + if (allocation.routingTable().indicesRouting().isEmpty()) { + logger.debug("No eager reconciliation needed for empty routing table"); + return; + } // Starts reconciliation towards desired balance that might have not been updated with a recent calculation yet. // This is fine as balance should have incremental rather than radical changes. // This should speed up achieving the desired balance in cases current state is still different from it (due to THROTTLING). diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java deleted file mode 100644 index 6e0a8afd6cf8e..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class TransportFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - // transport version was introduced in 8.8.0, but we need to wait until all nodes are >8.8.0 - // to properly detect when we need to fix transport versions - return Map.of(TransportVersionsFixupListener.FIX_TRANSPORT_VERSION, Version.V_8_8_1); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java deleted file mode 100644 index 0ae0f8b10aed7..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.stream.Collectors; - -import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; - -/** - * This fixes up the transport version from pre-8.8.0 cluster state that was inferred as the minimum possible, - * due to the master node not understanding cluster state with transport versions added in 8.8.0. - * Any nodes with the inferred placeholder cluster state is then refreshed with their actual transport version - */ -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9 -public class TransportVersionsFixupListener implements ClusterStateListener { - - private static final Logger logger = LogManager.getLogger(TransportVersionsFixupListener.class); - - static final NodeFeature FIX_TRANSPORT_VERSION = new NodeFeature("transport.fix_transport_version"); - - private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30); - - private final MasterServiceTaskQueue taskQueue; - private final ClusterAdminClient client; - private final Scheduler scheduler; - private final Executor executor; - private final Set pendingNodes = Collections.synchronizedSet(new HashSet<>()); - private final FeatureService featureService; - - public TransportVersionsFixupListener( - ClusterService service, - ClusterAdminClient client, - FeatureService featureService, - ThreadPool threadPool - ) { - // there tends to be a lot of state operations on an upgrade - this one is not time-critical, - // so use LOW priority. It just needs to be run at some point after upgrade. 
- this( - service.createTaskQueue("fixup-transport-versions", Priority.LOW, new TransportVersionUpdater()), - client, - featureService, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - TransportVersionsFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - FeatureService featureService, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.featureService = featureService; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodeTransportVersionTask implements ClusterStateTaskListener { - private final Map results; - private final int retryNum; - - NodeTransportVersionTask(Map results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply transport version for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map results() { - return results; - } - } - - private static class TransportVersionUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) throws Exception { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // this node's transport version might have been updated already/node has gone away - var cvMap = builder.compatibilityVersions(); - TransportVersion recordedTv = Optional.ofNullable(cvMap.get(e.getKey())) - .map(CompatibilityVersions::transportVersion) - .orElse(null); - assert (recordedTv != null) || (context.initialState().nodes().nodeExists(e.getKey()) == false) - : "Node " + e.getKey() + " is in the cluster but does not have an associated transport version recorded"; - if (Objects.equals(recordedTv, INFERRED_TRANSPORT_VERSION)) { - builder.putCompatibilityVersions(e.getKey(), e.getValue(), Map.of()); // unknown mappings versions - modified = true; - } - } - c.success(() -> {}); - } - return modified ? 
builder.build() : context.initialState(); - } - } - - @SuppressForbidden(reason = "maintaining ClusterState#compatibilityVersions requires reading them") - private static Map getCompatibilityVersions(ClusterState clusterState) { - return clusterState.compatibilityVersions(); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.localNodeMaster() == false) return; // only if we're master - - // if the min node version > 8.8.0, and the cluster state has some transport versions == 8.8.0, - // then refresh all inferred transport versions to their real versions - // now that everything should understand cluster state with transport versions - if (featureService.clusterHasFeature(event.state(), FIX_TRANSPORT_VERSION) - && event.state().getMinTransportVersion().equals(INFERRED_TRANSPORT_VERSION)) { - - // find all the relevant nodes - Set nodes = getCompatibilityVersions(event.state()).entrySet() - .stream() - .filter(e -> e.getValue().transportVersion().equals(INFERRED_TRANSPORT_VERSION)) - .map(Map.Entry::getKey) - .collect(Collectors.toSet()); - - updateTransportVersions(nodes, 0); - } - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> updateTransportVersions(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void updateTransportVersions(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesInfoRequest request = new NodesInfoRequest(outstandingNodes.toArray(String[]::new)); - request.clear(); // only requesting base data - client.nodesInfo(request, new ActionListener<>() { - @Override - public void onResponse(NodesInfoResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read transport versions for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesInfoResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read transport version info from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeInfo::getTransportVersion)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("update-transport-version", new NodeTransportVersionTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 926056fec3ec8..c0fe0bc32fb08 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -82,6 +82,7 @@ public enum ReferenceDocs { CIRCUIT_BREAKER_ERRORS, 
ALLOCATION_EXPLAIN_NO_COPIES, ALLOCATION_EXPLAIN_MAX_RETRY, + SECURE_SETTINGS, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index ddcfc1ea7eed8..51e6512072e41 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -74,6 +74,29 @@ static ByteBuffer[] toByteBuffers(BytesReference reference) { } } + /** + * Allocates a new buffer and copies the bytes from the given BytesReference. + * + * @deprecated copying bytes is a likely source of performance regressions and unnecessary allocations. + * This method exists to serve the very few places that struggle to handle reference-counted buffers. + */ + @Deprecated(forRemoval = true) + static BytesReference copyBytes(BytesReference bytesReference) { + byte[] arr = new byte[bytesReference.length()]; + int offset = 0; + final BytesRefIterator iterator = bytesReference.iterator(); + try { + BytesRef slice; + while ((slice = iterator.next()) != null) { + System.arraycopy(slice.bytes, slice.offset, arr, offset, slice.length); + offset += slice.length; + } + return new BytesArray(arr); + } catch (IOException e) { + throw new AssertionError(e); + } + } + /** * Returns BytesReference composed of the provided ByteBuffers. */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 36ca2df08724d..3d4f0d2d9dbf7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -185,9 +185,7 @@ private InsecureStringSetting(String name) { @Override public SecureString get(Settings settings) { if (ALLOW_INSECURE_SETTINGS == false && exists(settings)) { - throw new IllegalArgumentException( - "Setting [" + name + "] is insecure, " + "but property [allow_insecure_settings] is not set" - ); + throw new IllegalArgumentException("Setting [" + name + "] is insecure, use the elasticsearch keystore instead"); } return super.get(settings); } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 9cfa22d0a3cfb..9464ccbcc7aa3 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.internal.XContentParserDecorator; import org.elasticsearch.xcontent.DeprecationHandler; @@ -626,7 +627,22 @@ public static BytesReference toXContent(ChunkedToXContent toXContent, XContentTy */ public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params, boolean humanReadable) throws IOException { - try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + return toXContent(toXContent, xContentType, RestApiVersion.current(), params, humanReadable); + } + + /** + * Returns the bytes
that represent the XContent output of the provided {@link ToXContent} object, using the provided + * {@link XContentType}. Wraps the output into a new anonymous object according to the value returned + * by the {@link ToXContent#isFragment()} method. + */ + public static BytesReference toXContent( + ToXContent toXContent, + XContentType xContentType, + RestApiVersion restApiVersion, + Params params, + boolean humanReadable + ) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent(), restApiVersion)) { + builder.humanReadable(humanReadable); + if (toXContent.isFragment()) { + builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/features/FeatureData.java b/server/src/main/java/org/elasticsearch/features/FeatureData.java index 991bb4d82be3d..65b95eae27e06 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureData.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureData.java @@ -9,25 +9,19 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.NavigableMap; import java.util.Set; -import java.util.TreeMap; - -import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION; /** - * Reads and consolidate features exposed by a list {@link FeatureSpecification}, grouping them into historical features and node - * features for the consumption of {@link FeatureService} + * Reads and consolidates the features exposed by a list of {@link FeatureSpecification}s, + * grouping them together for consumption by {@link FeatureService} */ public class FeatureData { @@ -40,19 +34,14 @@ public class FeatureData { } } - private final NavigableMap<Version, Set<String>> historicalFeatures; private final Map<String, NodeFeature> nodeFeatures; - private FeatureData(NavigableMap<Version, Set<String>> historicalFeatures, Map<String, NodeFeature> nodeFeatures) { - this.historicalFeatures = historicalFeatures; + private FeatureData(Map<String, NodeFeature> nodeFeatures) { this.nodeFeatures = nodeFeatures; } public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) { Map<String, FeatureSpecification> allFeatures = new HashMap<>(); - - // Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version - NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of())); Map<String, NodeFeature> nodeFeatures = new HashMap<>(); for (FeatureSpecification spec : specs) { Set<NodeFeature> specFeatures = spec.getFeatures(); @@ -61,39 +50,6 @@ public static FeatureData createFromSpecifications(List new HashSet<>()).add(hfe.getKey().id()); - } for (NodeFeature f : specFeatures) { FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec); if (existing != null && existing.getClass() != spec.getClass()) { @@ -106,24 +62,7 @@ public static FeatureData createFromSpecifications(List> consolidateHistoricalFeatures( - NavigableMap<Version, Set<String>> declaredHistoricalFeatures - ) { - // update each version by adding in all features from previous versions - Set<String> featureAggregator = new HashSet<>(); - for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) { - featureAggregator.addAll(versions.getValue()); - versions.setValue(Set.copyOf(featureAggregator)); - } - - return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures); - } - - public NavigableMap<Version, Set<String>> getHistoricalFeatures() { - return historicalFeatures; + return new
FeatureData(Map.copyOf(nodeFeatures)); } public Map getNodeFeatures() { diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java index 1d911a75a4838..9a0ac7cafc183 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureService.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -9,7 +9,6 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.logging.LogManager; @@ -17,8 +16,6 @@ import java.util.List; import java.util.Map; -import java.util.NavigableMap; -import java.util.Set; /** * Manages information on the features supported by nodes in the cluster. @@ -34,9 +31,6 @@ public class FeatureService { private static final Logger logger = LogManager.getLogger(FeatureService.class); - public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0; - - private final NavigableMap> historicalFeatures; private final Map nodeFeatures; /** @@ -47,13 +41,12 @@ public FeatureService(List specs) { var featureData = FeatureData.createFromSpecifications(specs); nodeFeatures = featureData.getNodeFeatures(); - historicalFeatures = featureData.getHistoricalFeatures(); logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList()); } /** - * The non-historical features supported by this node. + * The features supported by this node. * @return Map of {@code feature-id} to its declaring {@code NodeFeature} object. */ public Map getNodeFeatures() { @@ -65,11 +58,6 @@ public Map getNodeFeatures() { */ @SuppressForbidden(reason = "We need basic feature information from cluster state") public boolean clusterHasFeature(ClusterState state, NodeFeature feature) { - if (state.clusterFeatures().clusterHasFeature(feature)) { - return true; - } - - var features = historicalFeatures.floorEntry(state.getNodes().getMinNodeVersion()); - return features != null && features.getValue().contains(feature.id()); + return state.clusterFeatures().clusterHasFeature(feature); } } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java index 03f0dd89f172e..c37bc4488f109 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -9,9 +9,6 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; - -import java.util.Map; import java.util.Set; /** @@ -49,12 +46,4 @@ default Set getFeatures() { default Set getTestFeatures() { return Set.of(); } - - /** - * Returns information on historical features that should be deemed to be present on all nodes - * on or above the {@link Version} specified. 
- */ - default Map getHistoricalFeatures() { - return Map.of(); - } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 6d106199610d6..091dbc0eae742 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -9,34 +9,17 @@ package org.elasticsearch.health; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; public class HealthFeatures implements FeatureSpecification { - public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); - public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); - public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @Override public Set getFeatures() { return Set.of(SUPPORTS_EXTENDED_REPOSITORY_INDICATOR); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - SUPPORTS_HEALTH, - Version.V_8_5_0, // health accessible via /_internal/_health - SUPPORTS_HEALTH_REPORT_API, - Version.V_8_7_0, // health accessible via /_health_report - SUPPORTS_SHARDS_CAPACITY_INDICATOR, - Version.V_8_8_0 - ); - } } diff --git a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java index 44fc65fab534f..0d30e157a3a09 100644 --- a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java +++ b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java @@ -28,7 +28,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.health.HealthFeatures; import java.util.List; import java.util.stream.Stream; @@ -137,7 +136,7 @@ private void updateOnHealthNodeEnabledChange(boolean enabled) { private boolean canPostClusterStateUpdates(ClusterState state) { // Wait until every node in the cluster supports health checks - return isMaster && state.clusterRecovered() && featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_HEALTH); + return isMaster && state.clusterRecovered(); } private void updateOnClusterStateChange(ClusterChangedEvent event) { diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index e38ce7ac92a05..c975e1d1abd91 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -91,15 +90,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources 
ClusterState clusterState = clusterService.state(); Map diskHealthInfoMap = healthInfo.diskInfoByNode(); if (diskHealthInfoMap == null || diskHealthInfoMap.isEmpty()) { - if (featureService.clusterHasFeature(clusterState, HealthFeatures.SUPPORTS_HEALTH) == false) { - return createIndicator( - HealthStatus.GREEN, - "No disk usage data available. The cluster currently has mixed versions (an upgrade may be in progress).", - HealthIndicatorDetails.EMPTY, - List.of(), - List.of() - ); - } /* * If there is no disk health info, that either means that a new health node was just elected, or something is seriously * wrong with health data collection on the health node. Either way, we immediately return UNKNOWN. If there are at least diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index a08de9abb4aed..aab9e972cba73 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.health.node.selection.HealthNode; @@ -200,7 +199,6 @@ public void clusterChanged(ClusterChangedEvent event) { } } prerequisitesFulfilled = event.state().clusterRecovered() - && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH) && HealthMetadata.getFromClusterState(event.state()) != null && currentHealthNode != null && currentMasterNode != null; diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index b02bbd95bb9ae..4dd94cfc046c9 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -111,15 +110,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources var state = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(state); if (healthMetadata == null || healthMetadata.getShardLimitsMetadata() == null) { - if (featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR) == false) { - return createIndicator( - HealthStatus.GREEN, - "No shard limits configured yet. 
The cluster currently has mixed versions (an upgrade may be in progress).", - HealthIndicatorDetails.EMPTY, - List.of(), - List.of() - ); - } return unknownIndicator(); } diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java index 3357936e5f10c..3efad1aee26b0 100644 --- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskParams; @@ -157,11 +156,8 @@ public PersistentTasksCustomMetadata.Assignment getAssignment( // visible for testing void startTask(ClusterChangedEvent event) { - // Wait until every node in the cluster supports health checks - if (event.localNodeMaster() - && event.state().clusterRecovered() - && HealthNode.findTask(event.state()) == null - && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH)) { + // Wait until master is stable before starting health task + if (event.localNodeMaster() && event.state().clusterRecovered() && HealthNode.findTask(event.state()) == null) { persistentTasksService.sendStartRequest( TASK_NAME, TASK_NAME, diff --git a/server/src/main/java/org/elasticsearch/http/HttpBody.java b/server/src/main/java/org/elasticsearch/http/HttpBody.java index a10487502ed3c..6571125677fab 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpBody.java +++ b/server/src/main/java/org/elasticsearch/http/HttpBody.java @@ -9,7 +9,6 @@ package org.elasticsearch.http; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.Nullable; @@ -21,11 +20,11 @@ public sealed interface HttpBody extends Releasable permits HttpBody.Full, HttpBody.Stream { static Full fromBytesReference(BytesReference bytesRef) { - return new ByteRefHttpBody(bytesRef); + return new ByteRefHttpBody(ReleasableBytesReference.wrap(bytesRef)); } static Full empty() { - return new ByteRefHttpBody(BytesArray.EMPTY); + return new ByteRefHttpBody(ReleasableBytesReference.empty()); } default boolean isFull() { @@ -56,7 +55,7 @@ default Stream asStream() { * Full content represents a complete http body content that can be accessed immediately. 
*/ non-sealed interface Full extends HttpBody { - BytesReference bytes(); + ReleasableBytesReference bytes(); @Override default void close() {} @@ -114,5 +113,5 @@ interface ChunkHandler extends Releasable { default void close() {} } - record ByteRefHttpBody(BytesReference bytes) implements Full {} + record ByteRefHttpBody(ReleasableBytesReference bytes) implements Full {} } diff --git a/server/src/main/java/org/elasticsearch/http/HttpRequest.java b/server/src/main/java/org/elasticsearch/http/HttpRequest.java index ca6e51f2cec08..b4b1bb84433c9 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpRequest.java +++ b/server/src/main/java/org/elasticsearch/http/HttpRequest.java @@ -52,10 +52,4 @@ enum HttpVersion { */ void release(); - /** - * If this instances uses any pooled resources, creates a copy of this instance that does not use any pooled resources and releases - * any resources associated with this instance. If the instance does not use any shared resources, returns itself. - * @return a safe unpooled http request - */ - HttpRequest releaseAndCopy(); } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 3d8360e6ee3fa..d6daf11c0539a 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -94,7 +94,7 @@ HttpTracer maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { private void logFullContent(RestRequest restRequest) { try (var stream = HttpBodyTracer.getBodyOutputStream(restRequest.getRequestId(), HttpBodyTracer.Type.REQUEST)) { - restRequest.content().writeTo(stream); + restRequest.releasableContent().writeTo(stream); } catch (Exception e2) { assert false : e2; // no real IO here } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 6a553d5dc5440..8c997a9766baa 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; @@ -54,7 +55,7 @@ Settings getAdditionalIndexSettings( /** * Infrastructure class that holds services that can be used by {@link IndexSettingProvider} instances. 
*/ - record Parameters(CheckedFunction mapperServiceFactory) { + record Parameters(ClusterService clusterService, CheckedFunction mapperServiceFactory) { } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5746bea12a2d8..7a5f469a57fa1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -134,6 +134,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index f80e8a89f5cf2..43ae38fea6018 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -105,6 +105,9 @@ public class IndexingPressure { private final AtomicLong replicaRejections = new AtomicLong(0); private final AtomicLong primaryDocumentRejections = new AtomicLong(0); + private final AtomicLong lowWaterMarkSplits = new AtomicLong(0); + private final AtomicLong highWaterMarkSplits = new AtomicLong(0); + private final long lowWatermark; private final long lowWatermarkSize; private final long highWatermark; @@ -265,11 +268,20 @@ public Releasable markReplicaOperationStarted(int operations, long bytes, boolea public boolean shouldSplitBulk(long size) { long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); - return (currentUsage >= lowWatermark && size >= lowWatermarkSize) || (currentUsage >= highWatermark && size >= highWatermarkSize); + if (currentUsage >= highWatermark && size >= highWatermarkSize) { + highWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to high watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + if (currentUsage >= lowWatermark && size >= lowWatermarkSize) { + lowWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to low watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + return (false); } public IndexingPressureStats stats() { - // TODO: Update stats with new primary/replica/coordinating limits and add throttling stats return new IndexingPressureStats( totalCombinedCoordinatingAndPrimaryBytes.get(), totalCoordinatingBytes.get(), @@ -290,7 +302,9 @@ public IndexingPressureStats stats() { currentPrimaryOps.get(), currentReplicaOps.get(), primaryDocumentRejections.get(), - totalCoordinatingRequests.get() + totalCoordinatingRequests.get(), + lowWaterMarkSplits.get(), + highWaterMarkSplits.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 2c1175648c219..91c4b780db0bd 100644 --- 
a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -189,7 +189,11 @@ public Analyzer getAnalyzer(String analyzer) throws IOException { } }); } - return analyzerProvider.get(environment, analyzer).get(); + + return overridePositionIncrementGap( + (NamedAnalyzer) analyzerProvider.get(environment, analyzer).get(), + TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + ); } @Override @@ -720,13 +724,8 @@ private static NamedAnalyzer produceAnalyzer( throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer"); } NamedAnalyzer analyzer; - if (analyzerF instanceof NamedAnalyzer) { - // if we got a named analyzer back, use it... - analyzer = (NamedAnalyzer) analyzerF; - if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) { - // unless the positionIncrementGap needs to be overridden - analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap); - } + if (analyzerF instanceof NamedAnalyzer namedAnalyzer) { + analyzer = overridePositionIncrementGap(namedAnalyzer, overridePositionIncrementGap); } else { analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap); } @@ -734,6 +733,13 @@ private static NamedAnalyzer produceAnalyzer( return analyzer; } + private static NamedAnalyzer overridePositionIncrementGap(NamedAnalyzer analyzer, int overridePositionIncrementGap) { + if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) { + analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap); + } + return analyzer; + } + private static void processNormalizerFactory( String name, AnalyzerProvider normalizerFactory, diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 59607fadc0dd9..33a8487bb33a3 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -58,8 +58,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; - /** * This is a cache for {@link BitDocIdSet} based filters and is unbounded by size or time. *

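The hunk below restricts eager loading of random-access filters to search-role nodes when running stateless, dropping the special case for fast-refresh indices on index-role nodes (consistent with the INDEX_FAST_REFRESH_SETTING import removed above). A minimal sketch of the resulting rule, with plain booleans standing in for the real IndexSettings/DiscoveryNode plumbing (illustrative names, not the actual method):

    // Condensed view of the decision implemented by shouldLoadRandomAccessFiltersEagerly below.
    static boolean shouldLoadEagerly(boolean eagerSettingEnabled, boolean isStateless, boolean hasSearchRole) {
        if (isStateless) {
            // stateless: only search-role nodes load random-access filters eagerly now
            return eagerSettingEnabled && hasSearchRole;
        }
        // stateful deployments keep the plain setting check
        return eagerSettingEnabled;
    }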
@@ -105,10 +103,7 @@ static boolean shouldLoadRandomAccessFiltersEagerly(IndexSettings settings) { boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return loadFiltersEagerlySetting - && (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) - || (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) - && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()))); + return loadFiltersEagerlySetting && DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE); } else { return loadFiltersEagerlySetting; } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java index 68363b5926a6b..f9fad74835683 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java @@ -23,56 +23,38 @@ public class BQSpaceUtils { public static final short B_QUERY = 4; - // the first four bits masked - private static final int B_QUERY_MASK = 15; /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + * Transpose the query vector into a byte array allowing for efficient bitwise operations with the + * index bit vectors. The idea here is to organize the query vector bits such that the first bit + * of every dimension is in the first set of dimensions bits, i.e. the first (dimensions/8) bytes. The second, + * third, and fourth bits are in the second, third, and fourth sets of dimensions bits, + * respectively. This allows for direct bitwise comparisons with the stored index vectors by + * summing the bitwise results with the required relative bit shifts.
+ * * @param q the query vector, assumed to be half-byte quantized with values between 0 and 15 - * @param dimensions the number of dimensions in the query vector * @param quantQueryByte the byte array to store the transposed query vector */ - public static void transposeBin(byte[] q, int dimensions, byte[] quantQueryByte) { - // TODO: rewrite this in Panama Vector API - int qOffset = 0; - final byte[] v1 = new byte[4]; - final byte[] v = new byte[32]; - for (int i = 0; i < dimensions; i += 32) { - // for every four bytes we shift left (with remainder across those bytes) - for (int j = 0; j < v.length; j += 4) { - v[j] = (byte) (q[qOffset + j] << B_QUERY | ((q[qOffset + j] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 1] = (byte) (q[qOffset + j + 1] << B_QUERY | ((q[qOffset + j + 1] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 2] = (byte) (q[qOffset + j + 2] << B_QUERY | ((q[qOffset + j + 2] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 3] = (byte) (q[qOffset + j + 3] << B_QUERY | ((q[qOffset + j + 3] >>> B_QUERY) & B_QUERY_MASK)); - } - for (int j = 0; j < B_QUERY; j++) { - moveMaskEpi8Byte(v, v1); - for (int k = 0; k < 4; k++) { - quantQueryByte[(B_QUERY - j - 1) * (dimensions / 8) + i / 8 + k] = v1[k]; - v1[k] = 0; - } - for (int k = 0; k < v.length; k += 4) { - v[k] = (byte) (v[k] + v[k]); - v[k + 1] = (byte) (v[k + 1] + v[k + 1]); - v[k + 2] = (byte) (v[k + 2] + v[k + 2]); - v[k + 3] = (byte) (v[k + 3] + v[k + 3]); - } - } - qOffset += 32; - } - } - - private static void moveMaskEpi8Byte(byte[] v, byte[] v1b) { - int m = 0; - for (int k = 0; k < v.length; k++) { - if ((v[k] & 0b10000000) == 0b10000000) { - v1b[m] |= 0b00000001; - } - if (k % 8 == 7) { - m++; - } else { - v1b[m] <<= 1; + public static void transposeHalfByte(byte[] q, byte[] quantQueryByte) { + for (int i = 0; i < q.length;) { + assert q[i] >= 0 && q[i] <= 15; + int lowerByte = 0; + int lowerMiddleByte = 0; + int upperMiddleByte = 0; + int upperByte = 0; + for (int j = 7; j >= 0 && i < q.length; j--) { + lowerByte |= (q[i] & 1) << j; + lowerMiddleByte |= ((q[i] >> 1) & 1) << j; + upperMiddleByte |= ((q[i] >> 2) & 1) << j; + upperByte |= ((q[i] >> 3) & 1) << j; + i++; } + int index = ((i + 7) / 8) - 1; + quantQueryByte[index] = (byte) lowerByte; + quantQueryByte[index + quantQueryByte.length / 4] = (byte) lowerMiddleByte; + quantQueryByte[index + quantQueryByte.length / 2] = (byte) upperMiddleByte; + quantQueryByte[index + 3 * quantQueryByte.length / 4] = (byte) upperByte; } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java index 192fb9092ac3a..aa72904fe1341 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java @@ -223,9 +223,7 @@ public QueryAndIndexResults quantizeQueryAndIndex(float[] vector, byte[] indexDe // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 // q¯ is an approximation of q′ (scalar quantized approximation) - // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal - byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); - BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, queryDestination); + BQSpaceUtils.transposeHalfByte(byteQuery, queryDestination); QueryFactors factors = new QueryFactors(quantResult.quantizedSum, distToC, lower, width, normVmC, vDotC); final float[] indexCorrections; if (similarityFunction == EUCLIDEAN) { @@ -366,9 +364,7 @@ 
public QueryFactors quantizeForQuery(float[] vector, byte[] destination, float[] // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 // q¯ is an approximation of q′ (scalar quantized approximation) - // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal - byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); - BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, destination); + BQSpaceUtils.transposeHalfByte(byteQuery, destination); QueryFactors factors; if (similarityFunction != EUCLIDEAN) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 5743baeec536d..333c37381c587 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -57,6 +57,10 @@ public Set getFeatures() { ); } + public static final NodeFeature CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX = new NodeFeature( + "mapper.constant_keyword.synthetic_source_write_fix" + ); + @Override public Set getTestFeatures() { return Set.of( @@ -66,7 +70,8 @@ public Set getTestFeatures() { SourceFieldMapper.SOURCE_MODE_FROM_INDEX_SETTING, IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS, - MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT + MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT, + CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 9a2c9517dfd05..31aa787c3f758 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -10,16 +10,13 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Iterator; import java.util.Map; -import java.util.Set; import java.util.function.Function; /** @@ -135,8 +132,6 @@ public final MetadataFieldMapper build(MapperBuilderContext context) { return build(); } - private static final Set UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost"); - public final void parseMetadataField(String name, MappingParserContext parserContext, Map fieldNode) { final Parameter[] params = getParameters(); Map> paramsMap = Maps.newHashMapWithExpectedSize(params.length); @@ -149,20 +144,6 @@ public final void parseMetadataField(String name, MappingParserContext parserCon final Object propNode = entry.getValue(); Parameter parameter = paramsMap.get(propName); if (parameter == null) { - if (UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) { - if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_6_0)) { - // silently ignore type, and a few other parameters: sadly we've been doing this for a long time - deprecationLogger.warn( - DeprecationCategory.API, - propName, - "Parameter [{}] has no effect on metadata field [{}] and will be removed in future", - propName, - name - ); - } - iterator.remove(); - continue; - } throw 
new MapperParsingException("unknown parameter [" + propName + "] on metadata field [" + name + "]"); } parameter.parse(name, parserContext, propNode); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd25cd6eb80a3..e5b12f748543f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -68,6 +70,9 @@ public class SourceFieldMapper extends MetadataFieldMapper { return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); + public static final String DEPRECATION_WARNING = "Configuring source mode in mappings is deprecated and will be removed " + + "in future versions. Use [index.mapping.source.mode] index setting instead."; + /** The source mode */ public enum Mode { DISABLED, @@ -79,28 +84,32 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper STORED = new SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper DISABLED = new SourceFieldMapper( Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); public static class Defaults { @@ -134,16 +143,7 @@ public static class Builder extends MetadataFieldMapper.Builder { * The default mode for TimeSeries is left empty on purpose, so that mapping printings include the synthetic * source mode. */ - private final Parameter mode = new Parameter<>( - "mode", - true, - () -> null, - (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), - m -> toType(m).enabled.explicit() ? 
null : toType(m).mode, - (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), - v -> v.toString().toLowerCase(Locale.ROOT) - ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) - .setSerializerCheck((includeDefaults, isConfigured, value) -> value != null); // don't emit if `enabled` is configured + private final Parameter mode; private final Parameter> includes = Parameter.stringArrayParam( "includes", false, @@ -158,15 +158,28 @@ public static class Builder extends MetadataFieldMapper.Builder { private final Settings settings; private final IndexMode indexMode; + private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.serializeMode = serializeMode; + this.mode = new Parameter<>( + "mode", + true, + () -> null, + (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), + m -> toType(m).enabled.explicit() ? null : toType(m).mode, + (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), + v -> v.toString().toLowerCase(Locale.ROOT) + ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) + // don't emit if `enabled` is configured + .setSerializerCheck((includeDefaults, isConfigured, value) -> serializeMode && value != null); } public Builder setSynthetic() { @@ -219,21 +232,22 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - - SourceFieldMapper sourceFieldMapper; - if (isDefault()) { + if (mode.isConfigured()) { + serializeMode = true; + } + final SourceFieldMapper sourceFieldMapper; + if (isDefault() && sourceMode == null) { // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. 
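As context for the serializeMode plumbing above and the deprecation hook further down in this hunk (my illustration, not from the patch): the mapping-level `mode` parameter is retained only for backwards compatibility, and new indices are expected to use the `index.mapping.source.mode` index setting instead, e.g.:

```java
// Hypothetical sketch: the preferred way to request synthetic source after
// this change is the index setting, not "_source": { "mode": ... } mappings.
Settings indexSettings = Settings.builder()
    .put("index.mapping.source.mode", "synthetic")
    .build();
// Supplying "_source.mode" in the mappings of a new index still parses, but the
// overridden parse(...) below logs SourceFieldMapper.DEPRECATION_WARNING as a
// critical deprecation.
```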
- if (sourceMode == null) { - sourceFieldMapper = DEFAULT; - } else { - sourceFieldMapper = resolveStaticInstance(sourceMode); - } + sourceFieldMapper = DEFAULT; + } else if (isDefault() && serializeMode == false && sourceMode != null) { + sourceFieldMapper = resolveStaticInstance(sourceMode); } else { sourceFieldMapper = new SourceFieldMapper( sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY) + excludes.getValue().toArray(Strings.EMPTY_ARRAY), + serializeMode ); } if (indexMode != null) { @@ -283,15 +297,29 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - - return resolveStaticInstance(settingSourceMode); + if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { + return resolveStaticInstance(settingSourceMode); + } else { + return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) ) - ); + ) { + @Override + public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) + throws MapperParsingException { + assert name.equals(SourceFieldMapper.NAME) : name; + if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { + deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); + } + return super.parse(name, node, parserContext); + } + }; static final class SourceFieldType extends MappedFieldType { private final boolean enabled; @@ -330,8 +358,9 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { } } - // nullable for bwc reasons + // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; + private final boolean serializeMode; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -341,7 +370,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -349,6 +378,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.includes = includes; this.excludes = excludes; this.complete = stored() && sourceFilter == null; + this.serializeMode = serializeMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -419,7 +449,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); } /** diff 
--git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 626875c75a5fe..83bca7d27aeeb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -112,6 +112,13 @@ public QueryBuilder query() { return query; } + /** + * Returns path to the searched nested object. + */ + public String path() { + return path; + } + /** * Returns inner hit definition in the scope of this query and reusing the defined type and query. */ diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index f1081d06d649d..9f6a2be8cdbc7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -45,10 +45,6 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i public static final ParseField LTE_FIELD = new ParseField("lte"); public static final ParseField GTE_FIELD = new ParseField("gte"); - public static final ParseField FROM_FIELD = new ParseField("from").withAllDeprecated(); - public static final ParseField TO_FIELD = new ParseField("to").withAllDeprecated(); - private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower").withAllDeprecated(); - private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper").withAllDeprecated(); public static final ParseField GT_FIELD = new ParseField("gt"); public static final ParseField LT_FIELD = new ParseField("lt"); private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); @@ -367,15 +363,7 @@ public static RangeQueryBuilder fromXContent(XContentParser parser) throws IOExc if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { - if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - from = maybeConvertToBytesRef(parser.objectBytes()); - } else if (TO_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - to = maybeConvertToBytesRef(parser.objectBytes()); - } else if (INCLUDE_LOWER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeLower = parser.booleanValue(); - } else if (INCLUDE_UPPER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeUpper = parser.booleanValue(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { boost = parser.floatValue(); } else if (GT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from = maybeConvertToBytesRef(parser.objectBytes()); diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index b5197274dd519..0a56db56b2c95 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -36,6 +36,12 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long primaryDocumentRejections; private final long memoryLimit; + /* Count number of splits due to SPLIT_BULK_LOW_WATERMARK and 
SPLIT_BULK_HIGH_WATERMARK + These 2 stats are not serialized via X content yet. + */ + private final long lowWaterMarkSplits; + private final long highWaterMarkSplits; + // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; private final long totalCoordinatingRequests; @@ -85,6 +91,14 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { totalCoordinatingRequests = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + lowWaterMarkSplits = in.readVLong(); + highWaterMarkSplits = in.readVLong(); + } else { + lowWaterMarkSplits = -1L; + highWaterMarkSplits = -1L; + } } public IndexingPressureStats( @@ -107,7 +121,9 @@ public IndexingPressureStats( long currentPrimaryOps, long currentReplicaOps, long primaryDocumentRejections, - long totalCoordinatingRequests + long totalCoordinatingRequests, + long lowWaterMarkSplits, + long highWaterMarkSplits ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -131,6 +147,9 @@ public IndexingPressureStats( this.primaryDocumentRejections = primaryDocumentRejections; this.totalCoordinatingRequests = totalCoordinatingRequests; + + this.lowWaterMarkSplits = lowWaterMarkSplits; + this.highWaterMarkSplits = highWaterMarkSplits; } @Override @@ -160,6 +179,11 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(totalCoordinatingRequests); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + out.writeVLong(lowWaterMarkSplits); + out.writeVLong(highWaterMarkSplits); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -242,6 +266,14 @@ public long getTotalCoordinatingRequests() { return totalCoordinatingRequests; } + public long getHighWaterMarkSplits() { + return highWaterMarkSplits; + } + + public long getLowWaterMarkSplits() { + return lowWaterMarkSplits; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 3ac61bbca1a21..27d832241bfed 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; -import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -79,7 +78,6 @@ import org.elasticsearch.env.ShardLock; import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.MetadataStateFormat; import 
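The two new counters follow the standard pattern for extending a wire format: the StreamInput constructor and writeTo are gated on the same TransportVersion, with a -1 sentinel standing in when the remote node predates the stats. A condensed sketch of the symmetric pair (paraphrasing the hunks above):

```java
// Reader side: older nodes never send the counters, so substitute -1.
if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) {
    lowWaterMarkSplits = in.readVLong();
} else {
    lowWaterMarkSplits = -1L;
}
// Writer side must mirror the exact same version check, or mixed-version
// clusters would misalign the remaining fields in the stream.
if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) {
    out.writeVLong(lowWaterMarkSplits);
}
```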
org.elasticsearch.index.CloseUtils; @@ -211,8 +209,6 @@ public class IndicesService extends AbstractLifecycleComponent Setting.Property.NodeScope ); - static final NodeFeature SUPPORTS_AUTO_PUT = new NodeFeature("indices.auto_put_supported"); - /** * The node's settings. */ @@ -910,9 +906,7 @@ public void createShard( .setConcreteIndex(shardRouting.index()) .source(mapping.source().string(), XContentType.JSON); client.execute( - featureService.clusterHasFeature(clusterService.state(), SUPPORTS_AUTO_PUT) - ? TransportAutoPutMappingAction.TYPE - : TransportPutMappingAction.TYPE, + TransportAutoPutMappingAction.TYPE, putMappingRequestAcknowledgedRequest.ackTimeout(TimeValue.MAX_VALUE).masterNodeTimeout(TimeValue.MAX_VALUE), new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) ); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 856b30d1c19e8..42cda4da1a9e6 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -110,7 +110,7 @@ public class SystemIndices { public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed"; public static final String EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_external_system_index_access_origin"; - public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-8"; + public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-9"; private static final Automaton EMPTY = Automata.makeEmpty(); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index ce61f197b4831..1494d2a46f9d0 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -519,7 +519,7 @@ public static boolean isNoOpPipelineUpdate(ClusterState state, PutPipelineReques && currentIngestMetadata.getPipelines().containsKey(request.getId())) { var pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); var currentPipeline = currentIngestMetadata.getPipelines().get(request.getId()); - if (currentPipeline.getConfigAsMap().equals(pipelineConfig)) { + if (currentPipeline.getConfig().equals(pipelineConfig)) { return true; } } @@ -1292,7 +1292,7 @@ synchronized void innerUpdatePipelines(IngestMetadata newIngestMetadata) { try { Pipeline newPipeline = Pipeline.create( newConfiguration.getId(), - newConfiguration.getConfigAsMap(), + newConfiguration.getConfig(false), processorFactories, scriptService ); @@ -1416,7 +1416,7 @@ public
Collection getPipelineWithProcessorType(Cla public synchronized void reloadPipeline(String id) throws Exception { PipelineHolder holder = pipelines.get(id); - Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfigAsMap(), processorFactories, scriptService); + Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfig(false), processorFactories, scriptService); Map updatedPipelines = new HashMap<>(this.pipelines); updatedPipelines.put(id, new PipelineHolder(holder.configuration, updatedPipeline)); this.pipelines = Map.copyOf(updatedPipelines); diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 9067cdb2040fd..64142caf4189d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -9,12 +9,14 @@ package org.elasticsearch.ingest; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ObjectParser; @@ -22,26 +24,32 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; -import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; /** - * Encapsulates a pipeline's id and configuration as a blob + * Encapsulates a pipeline's id and configuration as a loosely typed map -- see {@link Pipeline} for the + * parsed and processed object(s) that a pipeline configuration will become. This class is used for things + * like keeping track of pipelines in the cluster state (where a pipeline is 'just some json') whereas the + * {@link Pipeline} class is used in the actual processing of ingest documents through pipelines in the + * {@link IngestService}. 
*/ public final class PipelineConfiguration implements SimpleDiffable, ToXContentObject { private static final ObjectParser PARSER = new ObjectParser<>("pipeline_config", true, Builder::new); static { PARSER.declareString(Builder::setId, new ParseField("id")); - PARSER.declareField((parser, builder, aVoid) -> { - XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); - contentBuilder.generator().copyCurrentStructure(parser); - builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); - }, new ParseField("config"), ObjectParser.ValueType.OBJECT); - + PARSER.declareField( + (parser, builder, aVoid) -> builder.setConfig(parser.map()), + new ParseField("config"), + ObjectParser.ValueType.OBJECT + ); } public static ContextParser getParser() { @@ -51,56 +59,94 @@ public static ContextParser getParser() { private static class Builder { private String id; - private BytesReference config; - private XContentType xContentType; + private Map config; void setId(String id) { this.id = id; } - void setConfig(BytesReference config, XContentType xContentType) { + void setConfig(Map config) { this.config = config; - this.xContentType = xContentType; } PipelineConfiguration build() { - return new PipelineConfiguration(id, config, xContentType); + return new PipelineConfiguration(id, config); } } private final String id; - // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state - // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options) - // also the get pipeline api just directly returns this to the caller - private final BytesReference config; - private final XContentType xContentType; + private final Map config; - public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) { + public PipelineConfiguration(String id, Map config) { this.id = Objects.requireNonNull(id); - this.config = Objects.requireNonNull(config); - this.xContentType = Objects.requireNonNull(xContentType); + this.config = deepCopy(config, true); // defensive deep copy + } + + /** + * A convenience constructor that parses some bytes as a map representing a pipeline's config and then delegates to the + * conventional {@link #PipelineConfiguration(String, Map)} constructor. 
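A small usage sketch of the new accessor contract (my illustration, not from the patch): the no-argument getter returns the shared unmodifiable map, while getConfig(false) hands back a fresh mutable deep copy that callers such as maybeUpgradeProcessors can edit safely.

```java
// Hypothetical example pipeline config held as a loosely typed map.
PipelineConfiguration pc = new PipelineConfiguration(
    "my-pipeline",
    Map.of("description", "example", "processors", List.of())
);
Map<String, Object> shared = pc.getConfig();       // unmodifiable reference
// shared.put("version", 1);                       // would throw UnsupportedOperationException
Map<String, Object> scratch = pc.getConfig(false); // mutable deep copy
scratch.put("version", 2);                         // safe: pc's own map is untouched
```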
+ * + * @param id the id of the pipeline + * @param config a parse-able bytes reference that will return a pipeline configuration + * @param xContentType the content-type to use while parsing the pipeline configuration + */ + public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) { + this(id, XContentHelper.convertToMap(config, true, xContentType).v2()); } public String getId() { return id; } - public Map getConfigAsMap() { - return XContentHelper.convertToMap(config, true, xContentType).v2(); + /** + * @return a reference to the unmodifiable configuration map for this pipeline + */ + public Map getConfig() { + return getConfig(true); } - // pkg-private for tests - XContentType getXContentType() { - return xContentType; + /** + * @param unmodifiable whether the returned map should be unmodifiable or not + * @return a reference to the unmodifiable config map (if unmodifiable is true) or + * a reference to a freshly-created mutable deep copy of the config map (if unmodifiable is false) + */ + public Map getConfig(boolean unmodifiable) { + if (unmodifiable) { + return config; // already unmodifiable + } else { + return deepCopy(config, false); + } + } + + @SuppressWarnings("unchecked") + private static T deepCopy(final T value, final boolean unmodifiable) { + return (T) innerDeepCopy(value, unmodifiable); } - // pkg-private for tests - BytesReference getConfig() { - return config; + private static Object innerDeepCopy(final Object value, final boolean unmodifiable) { + if (value instanceof Map mapValue) { + final Map copy = Maps.newLinkedHashMapWithExpectedSize(mapValue.size()); // n.b. maintain ordering + for (Map.Entry entry : mapValue.entrySet()) { + copy.put(innerDeepCopy(entry.getKey(), unmodifiable), innerDeepCopy(entry.getValue(), unmodifiable)); + } + return unmodifiable ? Collections.unmodifiableMap(copy) : copy; + } else if (value instanceof List listValue) { + final List copy = new ArrayList<>(listValue.size()); + for (Object itemValue : listValue) { + copy.add(innerDeepCopy(itemValue, unmodifiable)); + } + return unmodifiable ? 
Collections.unmodifiableList(copy) : copy; + } else { + // if this list of expected value types ends up not being exhaustive, then we want to learn about that + // at development time, but it's probably better to err on the side of passing through the value at runtime + assert (value == null || value instanceof String || value instanceof Number || value instanceof Boolean) + : "unexpected value type [" + value.getClass() + "]"; + return value; + } } public Integer getVersion() { - Object o = getConfigAsMap().get("version"); + Object o = config.get("version"); if (o == null) { return null; } else if (o instanceof Number number) { @@ -114,13 +160,22 @@ public Integer getVersion() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("id", id); - builder.field("config", getConfigAsMap()); + builder.field("config", config); builder.endObject(); return builder; } public static PipelineConfiguration readFrom(StreamInput in) throws IOException { - return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class)); + final String id = in.readString(); + final Map config; + if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + config = in.readGenericMap(); + } else { + final BytesReference bytes = in.readSlicedBytesReference(); + final XContentType type = in.readEnum(XContentType.class); + config = XContentHelper.convertToMap(bytes, true, type).v2(); + } + return new PipelineConfiguration(id, config); } public static Diff readDiffFrom(StreamInput in) throws IOException { @@ -135,8 +190,14 @@ public String toString() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - out.writeBytesReference(config); - XContentHelper.writeTo(out, xContentType); + if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + out.writeGenericMap(config); + } else { + XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint(); + builder.map(config); + out.writeBytesReference(BytesReference.bytes(builder)); + XContentHelper.writeTo(out, XContentType.JSON); + } } @Override @@ -147,14 +208,14 @@ public boolean equals(Object o) { PipelineConfiguration that = (PipelineConfiguration) o; if (id.equals(that.id) == false) return false; - return getConfigAsMap().equals(that.getConfigAsMap()); + return config.equals(that.config); } @Override public int hashCode() { int result = id.hashCode(); - result = 31 * result + getConfigAsMap().hashCode(); + result = 31 * result + config.hashCode(); return result; } @@ -164,7 +225,7 @@ public int hashCode() { *
The given upgrader is applied to the config map for any processor of the given type. */ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.ProcessorConfigUpgrader upgrader) { - Map mutableConfigMap = getConfigAsMap(); + Map mutableConfigMap = getConfig(false); boolean changed = false; // This should be a List of Maps, where the keys are processor types and the values are config maps. // But we'll skip upgrading rather than fail if not. @@ -180,11 +241,7 @@ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.Process } } if (changed) { - try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { - return new PipelineConfiguration(id, BytesReference.bytes(builder.map(mutableConfigMap)), xContentType); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + return new PipelineConfiguration(id, mutableConfigMap); } else { return this; } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index acc26a42e4745..94395193622e0 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -643,6 +643,34 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.low_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_LOW_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getLowWaterMarkSplits) + .orElse(0L) + ) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.high_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_HIGH_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getHighWaterMarkSplits) + .orElse(0L) + ) + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.flush.total.time", diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index ec4a534fc883b..80c9aafaa84b4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -69,6 +69,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.repositories.RepositoriesService; @@ -180,8 +181,8 @@ public class Node implements Closeable { * * @param environment the initial environment for this node, which will be added to by plugins */ - public Node(Environment environment) { - this(NodeConstruction.prepareConstruction(environment, new NodeServiceProvider(), true)); + public Node(Environment environment, PluginsLoader pluginsLoader) { + this(NodeConstruction.prepareConstruction(environment, pluginsLoader, new NodeServiceProvider(), true)); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 
e8b9d18a1dd08..2488ac894a612 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -42,7 +42,6 @@ import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.MasterHistoryService; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSettings; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -62,7 +61,6 @@ import org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.TransportVersionsFixupListener; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.component.LifecycleComponent; @@ -166,6 +164,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.RecoveryPlannerPlugin; import org.elasticsearch.plugins.ReloadablePlugin; @@ -188,6 +187,7 @@ import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.rest.action.search.SearchResponseMetrics; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -262,6 +262,7 @@ class NodeConstruction { */ static NodeConstruction prepareConstruction( Environment initialEnvironment, + PluginsLoader pluginsLoader, NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings ) { @@ -269,7 +270,7 @@ static NodeConstruction prepareConstruction( try { NodeConstruction constructor = new NodeConstruction(closeables); - Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); + Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider, pluginsLoader); constructor.loadLoggingDataProviders(); TelemetryProvider telemetryProvider = constructor.createTelemetryProvider(settings); ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); @@ -402,7 +403,7 @@ private static Optional getSinglePlugin(Stream plugins, Class plugi return Optional.of(plugin); } - private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider) { + private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider, PluginsLoader pluginsLoader) { // Pass the node settings to the DeprecationLogger class so that it can have the deprecation.skip_deprecated_settings setting: Settings envSettings = initialEnvironment.settings(); DeprecationLogger.initialize(envSettings); @@ -475,7 +476,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr (e, apmConfig) -> 
logger.error("failed to delete temporary APM config file [{}], reason: [{}]", apmConfig, e.getMessage()) ); - pluginsService = serviceProvider.newPluginService(initialEnvironment, envSettings); + pluginsService = serviceProvider.newPluginService(initialEnvironment, pluginsLoader); modules.bindToInstance(PluginsService.class, pluginsService); Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), envSettings); @@ -788,10 +789,6 @@ private void construct( if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMappingUpdateService(systemIndices, client)); - clusterService.addListener( - new TransportVersionsFixupListener(clusterService, client.admin().cluster(), featureService, threadPool) - ); - clusterService.addListener(new NodeFeaturesFixupListener(clusterService, client.admin().cluster(), threadPool)); } SourceFieldMetrics sourceFieldMetrics = new SourceFieldMetrics( @@ -826,7 +823,7 @@ private void construct( .searchOperationListeners(searchOperationListeners) .build(); - final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); + final var parameters = new IndexSettingProvider.Parameters(clusterService, indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( Sets.union( builtinIndexSettingProviders(), @@ -1036,10 +1033,12 @@ private void construct( actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), - environment + environment, + fileSettingsHealthIndicatorService ); RestoreService restoreService = new RestoreService( @@ -1133,7 +1132,8 @@ private void construct( featureService, threadPool, telemetryProvider, - repositoriesService + repositoriesService, + fileSettingsHealthIndicatorService ) ); @@ -1305,7 +1305,8 @@ private Module loadDiagnosticServices( FeatureService featureService, ThreadPool threadPool, TelemetryProvider telemetryProvider, - RepositoriesService repositoriesService + RepositoriesService repositoriesService, + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService ) { MasterHistoryService masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); @@ -1320,7 +1321,8 @@ private Module loadDiagnosticServices( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), new DiskHealthIndicatorService(clusterService, featureService), - new ShardsCapacityHealthIndicatorService(clusterService, featureService) + new ShardsCapacityHealthIndicatorService(clusterService, featureService), + fileSettingsHealthIndicatorService ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index f18655afb8f02..8f2dc4e532ae0 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ 
b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -27,6 +27,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.script.ScriptContext; @@ -51,9 +52,9 @@ */ class NodeServiceProvider { - PluginsService newPluginService(Environment environment, Settings settings) { + PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java new file mode 100644 index 0000000000000..6b3eda6c0c9b4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -0,0 +1,461 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.ModuleQualifiedExportsService; + +import java.io.IOException; +import java.lang.ModuleLayer.Controller; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; + +/** + * This class is used to load modules and module layers for each plugin during + * node initialization prior to enablement of entitlements. This allows entitlements + * to have all the plugin information they need prior to starting. 
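Pulling the pieces together, a hedged wiring sketch (derived from the Node and NodeServiceProvider hunks above; the real bootstrap path may differ): module and plugin layers are now resolved once by PluginsLoader, before entitlements are enabled, and the loader is threaded into Node construction instead of the node scanning the directories itself.

```java
// Hypothetical bootstrap helper; modulesFile()/pluginsFile() may be null to
// skip loading from the filesystem, mirroring the constructor's contract.
static Node buildNode(Environment environment) {
    PluginsLoader pluginsLoader = new PluginsLoader(environment.modulesFile(), environment.pluginsFile());
    return new Node(environment, pluginsLoader);
}
```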
+ */ +public class PluginsLoader { + + /** + * Contains information about the {@link ClassLoader} required to load a plugin + */ + public interface PluginLayer { + /** + * @return Information about the bundle of jars used in this plugin + */ + PluginBundle pluginBundle(); + + /** + * @return The {@link ClassLoader} used to instantiate the main class for the plugin + */ + ClassLoader pluginClassLoader(); + } + + /** + * Contains information about the {@link ClassLoader}s and {@link ModuleLayer} required for loading a plugin + * @param pluginBundle Information about the bundle of jars used in this plugin + * @param pluginClassLoader The {@link ClassLoader} used to instantiate the main class for the plugin + * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules + * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules + */ + private record LoadedPluginLayer( + PluginBundle pluginBundle, + ClassLoader pluginClassLoader, + ClassLoader spiClassLoader, + ModuleLayer spiModuleLayer + ) implements PluginLayer { + + public LoadedPluginLayer { + Objects.requireNonNull(pluginBundle); + Objects.requireNonNull(pluginClassLoader); + Objects.requireNonNull(spiClassLoader); + Objects.requireNonNull(spiModuleLayer); + } + } + + /** + * Tuple of module layer and loader. + * Modular Plugins have a plugin specific loader and layer. + * Non-Modular plugins have a plugin specific loader and the boot layer. + */ + public record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { + + public LayerAndLoader { + Objects.requireNonNull(layer); + Objects.requireNonNull(loader); + } + + public static LayerAndLoader ofLoader(ClassLoader loader) { + return new LayerAndLoader(ModuleLayer.boot(), loader); + } + } + + private static final Logger logger = LogManager.getLogger(PluginsLoader.class); + private static final Module serverModule = PluginsLoader.class.getModule(); + + private final List moduleDescriptors; + private final List pluginDescriptors; + private final Map loadedPluginLayers; + + /** + * Constructs a new PluginsLoader + * + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem + * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + */ + @SuppressWarnings("this-escape") + public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + + Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); + addServerExportsService(qualifiedExports); + + Set seenBundles = new LinkedHashSet<>(); + + // load (elasticsearch) module layers + if (modulesDirectory != null) { + try { + Set modules = PluginsUtils.getModuleBundles(modulesDirectory); + moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(modules); + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize modules", ex); + } + } else { + moduleDescriptors = Collections.emptyList(); + } + + // load plugin layers + if (pluginsDirectory != null) { + try { + // TODO: remove this leniency, but tests bogusly rely on it + if (isAccessibleDirectory(pluginsDirectory, logger)) { + PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); + Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); + pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(plugins); + } else { + pluginDescriptors = 
Collections.emptyList(); + } + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize plugins", ex); + } + } else { + pluginDescriptors = Collections.emptyList(); + } + + this.loadedPluginLayers = Collections.unmodifiableMap(loadPluginLayers(seenBundles, qualifiedExports)); + } + + public List moduleDescriptors() { + return moduleDescriptors; + } + + public List pluginDescriptors() { + return pluginDescriptors; + } + + public Stream pluginLayers() { + return loadedPluginLayers.values().stream().map(Function.identity()); + } + + private Map loadPluginLayers( + Set bundles, + Map> qualifiedExports + ) { + Map loaded = new LinkedHashMap<>(); + Map> transitiveUrls = new HashMap<>(); + List sortedBundles = PluginsUtils.sortBundles(bundles); + if (sortedBundles.isEmpty() == false) { + Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); + for (PluginBundle bundle : sortedBundles) { + PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); + loadPluginLayer(bundle, loaded, qualifiedExports); + } + } + + return loaded; + } + + private void loadPluginLayer( + PluginBundle bundle, + Map loaded, + Map> qualifiedExports + ) { + String name = bundle.plugin.getName(); + logger.debug(() -> "Loading bundle: " + name); + + PluginsUtils.verifyCompatibility(bundle.plugin); + + // collect the list of extended plugins + List extendedPlugins = new ArrayList<>(); + for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { + LoadedPluginLayer extendedPlugin = loaded.get(extendedPluginName); + assert extendedPlugin != null; + assert extendedPlugin.spiClassLoader() != null : "All non-classpath plugins should be loaded with a classloader"; + extendedPlugins.add(extendedPlugin); + } + + final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( + getClass().getClassLoader(), + extendedPlugins.stream().map(LoadedPluginLayer::spiClassLoader).toList() + ); + LayerAndLoader spiLayerAndLoader = null; + if (bundle.hasSPI()) { + spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); + } + + final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? 
parentLoader : spiLayerAndLoader.loader(); + final LayerAndLoader pluginLayerAndLoader = createPlugin( + bundle, + pluginParentLoader, + extendedPlugins, + spiLayerAndLoader, + qualifiedExports + ); + final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); + + if (spiLayerAndLoader == null) { + // use full implementation for plugins extending this one + spiLayerAndLoader = pluginLayerAndLoader; + } + + loaded.put(name, new LoadedPluginLayer(bundle, pluginClassLoader, spiLayerAndLoader.loader, spiLayerAndLoader.layer)); + } + + static LayerAndLoader createSPI( + PluginBundle bundle, + ClassLoader parentLoader, + List extendedPlugins, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); + return createSpiModuleLayer( + bundle.spiUrls, + parentLoader, + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer).toList(), + qualifiedExports + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); + } + } + + static LayerAndLoader createPlugin( + PluginBundle bundle, + ClassLoader pluginParentLoader, + List extendedPlugins, + LayerAndLoader spiLayerAndLoader, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); + var parentLayers = Stream.concat( + Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer) + ).toList(); + return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); + } else if (plugin.isStable()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); + return LayerAndLoader.ofLoader( + UberModuleClassLoader.getInstance( + pluginParentLoader, + ModuleLayer.boot(), + "synthetic." 
+ toModuleName(plugin.getName()), + bundle.allUrls, + Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules + ) + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); + } + } + + static LayerAndLoader createSpiModuleLayer( + Set urls, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + // assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + null, // no entry point + spiModuleName(urls), + urlsToPaths(urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createPluginModuleLayer( + PluginBundle bundle, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + bundle.plugin.getClassname(), + bundle.plugin.getModuleName().get(), + urlsToPaths(bundle.urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createModuleLayer( + String className, + String moduleName, + Path[] paths, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); + var finder = ModuleFinder.of(paths); + + var configuration = Configuration.resolveAndBind( + ModuleFinder.of(), + parentConfigurationOrBoot(parentLayers), + finder, + Set.of(moduleName) + ); + var controller = privilegedDefineModulesWithOneLoader(configuration, parentLayersOrBoot(parentLayers), parentLoader); + var pluginModule = controller.layer().findModule(moduleName).get(); + ensureEntryPointAccessible(controller, pluginModule, className); + // export/open upstream modules to this plugin module + exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); + // configure qualified exports/opens to other modules/plugins + addPluginExportsServices(qualifiedExports, controller); + logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); + return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); + } + + /** Determines the module name of the SPI module, given its URL. 
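Since stable, non-modular plugins are now wrapped in a synthetic module, the name normalization done by toModuleName (defined just below) is worth a couple of worked examples; these are my own, derived from the regexes:

```java
// Hypothetical inputs: runs of non-word characters collapse to dots, leading
// non-alpha/underscore characters and any trailing dot are trimmed, then the
// result is lower-cased into a package-shaped module name.
assert toModuleName("analysis-icu").equals("analysis.icu");
assert toModuleName("2fa-plugin").equals("fa.plugin"); // leading digit trimmed
assert toModuleName("My_Plugin!").equals("my_plugin"); // trailing dot trimmed
```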
*/ + static String spiModuleName(Set spiURLS) { + ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); + var mrefs = finder.findAll(); + assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; + return mrefs.stream().findFirst().get().descriptor().name(); + } + + // package-visible for testing + static String toModuleName(String name) { + String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots + .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start + .replaceAll("\\.$", "") // trim trailing dot + .toLowerCase(Locale.getDefault()); + assert ModuleSupport.isPackageName(result); + return result; + } + + static final String toPackageName(String className) { + assert className.endsWith(".") == false; + int index = className.lastIndexOf('.'); + if (index == -1) { + throw new IllegalStateException("invalid class name:" + className); + } + return className.substring(0, index); + } + + @SuppressForbidden(reason = "I need to convert URL's to Paths") + static final Path[] urlsToPaths(Set urls) { + return urls.stream().map(PluginsLoader::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); + } + + static final URI uncheckedToURI(URL url) { + try { + return url.toURI(); + } catch (URISyntaxException e) { + throw new AssertionError(new IOException(e)); + } + } + + private static List parentConfigurationOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot().configuration()); + } else { + return parentLayers.stream().map(ModuleLayer::configuration).toList(); + } + } + + /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ + private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { + if (className != null) { + controller.addOpens(pluginModule, toPackageName(className), serverModule); + } + } + + @SuppressWarnings("removal") + static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { + return AccessController.doPrivileged( + (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) + ); + } + + @SuppressWarnings("removal") + static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { + return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); + } + + private static List parentLayersOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot()); + } else { + return parentLayers; + } + } + + protected void addServerExportsService(Map> qualifiedExports) { + var exportsService = new ModuleQualifiedExportsService(serverModule) { + @Override + protected void addExports(String pkg, Module target) { + serverModule.addExports(pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + serverModule.addOpens(pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, serverModule.getName()); + } + + private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { + for (Module module : controller.layer().modules()) { + var exportsService = new ModuleQualifiedExportsService(module) { + @Override + protected void addExports(String pkg, Module target) { + controller.addExports(module, pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + 
controller.addOpens(module, pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, module.getName()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index d5dd6d62d615e..6ef3cd17ba2e9 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -23,34 +23,22 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; -import org.elasticsearch.jdk.JarHell; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.plugins.PluginsLoader.PluginLayer; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.plugins.spi.SPIClassIterator; import java.io.IOException; -import java.lang.ModuleLayer.Controller; -import java.lang.module.Configuration; -import java.lang.module.ModuleFinder; import java.lang.reflect.Constructor; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -63,10 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; - public class PluginsService implements ReportingService { public StablePluginsRegistry getStablePluginRegistry() { @@ -77,33 +61,18 @@ public StablePluginsRegistry getStablePluginRegistry() { * A loaded plugin is one for which Elasticsearch has successfully constructed an instance of the plugin's class * @param descriptor Metadata about the plugin, usually loaded from plugin properties * @param instance The constructed instance of the plugin's main class - * @param loader The classloader for the plugin - * @param layer The module layer for the plugin */ - record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader loader, ModuleLayer layer) { + record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader classLoader) { LoadedPlugin { Objects.requireNonNull(descriptor); Objects.requireNonNull(instance); - Objects.requireNonNull(loader); - Objects.requireNonNull(layer); - } - - /** - * Creates a loaded classpath plugin. A classpath plugin is a plugin loaded - * by the system classloader and defined to the unnamed module of the boot layer. 
- */ - LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { - this(descriptor, instance, PluginsService.class.getClassLoader(), ModuleLayer.boot()); } } private static final Logger logger = LogManager.getLogger(PluginsService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(PluginsService.class); - private final Settings settings; - private final Path configPath; - /** * We keep around a list of plugins and modules. The order of * this list is that which the plugins and modules were loaded in. @@ -117,69 +86,32 @@ record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader lo /** * Constructs a new PluginService * - * @param settings The settings of the system - * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem - * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + * @param settings The settings for this node + * @param configPath The configuration path for this node + * @param pluginsLoader the information required to complete loading of plugins */ - @SuppressWarnings("this-escape") - public PluginsService(Settings settings, Path configPath, Path modulesDirectory, Path pluginsDirectory) { - this.settings = settings; - this.configPath = configPath; - - Map<String, List<ModuleQualifiedExportsService>> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); - addServerExportsService(qualifiedExports); - - Set<PluginBundle> seenBundles = new LinkedHashSet<>(); - - // load modules - List<PluginDescriptor> modulesList = new ArrayList<>(); - Set<String> moduleNameList = new HashSet<>(); - if (modulesDirectory != null) { - try { - Set<PluginBundle> modules = PluginsUtils.getModuleBundles(modulesDirectory); - modules.stream().map(PluginBundle::pluginDescriptor).forEach(m -> { - modulesList.add(m); - moduleNameList.add(m.getName()); - }); - seenBundles.addAll(modules); - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize modules", ex); - } - } + public PluginsService(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map<String, LoadedPlugin> loadedPlugins = loadPluginBundles(settings, configPath, pluginsLoader); - // load plugins - List<PluginDescriptor> pluginsList = new ArrayList<>(); - if (pluginsDirectory != null) { - try { - // TODO: remove this leniency, but tests bogusly rely on it - if (isAccessibleDirectory(pluginsDirectory, logger)) { - PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); - Set<PluginBundle> plugins = PluginsUtils.getPluginBundles(pluginsDirectory); - plugins.stream().map(PluginBundle::pluginDescriptor).forEach(pluginsList::add); - seenBundles.addAll(plugins); - } - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize plugins", ex); - } - } - - LinkedHashMap<String, LoadedPlugin> loadedPlugins = loadBundles(seenBundles, qualifiedExports); + var modulesDescriptors = pluginsLoader.moduleDescriptors(); + var pluginDescriptors = pluginsLoader.pluginDescriptors(); var inspector = PluginIntrospector.getInstance(); - this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginsList, loadedPlugins), modulesList); + this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginDescriptors, loadedPlugins), modulesDescriptors); this.plugins = List.copyOf(loadedPlugins.values()); - checkDeprecations(inspector, pluginsList, loadedPlugins); + checkDeprecations(inspector, pluginDescriptors, loadedPlugins); checkMandatoryPlugins( - pluginsList.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), +
pluginDescriptors.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), new HashSet<>(MANDATORY_SETTING.get(settings)) ); // we don't log jars in lib/ we really shouldn't log modules, // but for now: just be transparent so we can debug any potential issues + Set moduleNames = new HashSet<>(modulesDescriptors.stream().map(PluginDescriptor::getName).toList()); for (String name : loadedPlugins.keySet()) { - if (moduleNameList.contains(name)) { + if (moduleNames.contains(name)) { logger.info("loaded module [{}]", name); } else { logger.info("loaded plugin [{}]", name); @@ -282,23 +214,11 @@ protected List plugins() { return this.plugins; } - private LinkedHashMap loadBundles( - Set bundles, - Map> qualifiedExports - ) { - LinkedHashMap loaded = new LinkedHashMap<>(); - Map> transitiveUrls = new HashMap<>(); - List sortedBundles = PluginsUtils.sortBundles(bundles); - if (sortedBundles.isEmpty() == false) { - Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); - for (PluginBundle bundle : sortedBundles) { - PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadBundle(bundle, loaded, qualifiedExports); - } - } - - loadExtensions(loaded.values()); - return loaded; + private Map loadPluginBundles(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = new LinkedHashMap<>(); + pluginsLoader.pluginLayers().forEach(pl -> loadBundle(pl, loadedPlugins, settings, configPath)); + loadExtensions(loadedPlugins.values()); + return loadedPlugins; } // package-private for test visibility @@ -443,68 +363,43 @@ private static String extensionConstructorMessage(Class extensi return "constructor for extension [" + extensionClass.getName() + "] of type [" + extensionPointType.getName() + "]"; } - private void loadBundle( - PluginBundle bundle, - Map loaded, - Map> qualifiedExports - ) { - String name = bundle.plugin.getName(); - logger.debug(() -> "Loading bundle: " + name); - - PluginsUtils.verifyCompatibility(bundle.plugin); + private void loadBundle(PluginLayer pluginLayer, Map loadedPlugins, Settings settings, Path configPath) { + String name = pluginLayer.pluginBundle().plugin.getName(); + logger.debug(() -> "Loading plugin bundle: " + name); - // collect the list of extended plugins + // validate the list of extended plugins List extendedPlugins = new ArrayList<>(); - for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { - LoadedPlugin extendedPlugin = loaded.get(extendedPluginName); + for (String extendedPluginName : pluginLayer.pluginBundle().plugin.getExtendedPlugins()) { + LoadedPlugin extendedPlugin = loadedPlugins.get(extendedPluginName); assert extendedPlugin != null; if (ExtensiblePlugin.class.isInstance(extendedPlugin.instance()) == false) { throw new IllegalStateException("Plugin [" + name + "] cannot extend non-extensible plugin [" + extendedPluginName + "]"); } - assert extendedPlugin.loader() != null : "All non-classpath plugins should be loaded with a classloader"; extendedPlugins.add(extendedPlugin); logger.debug( - () -> "Loading bundle: " + name + ", ext plugins: " + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() + () -> "Loading plugin bundle: " + + name + + ", ext plugins: " + + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() ); } - final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( - getClass().getClassLoader(), - extendedPlugins.stream().map(LoadedPlugin::loader).toList() - ); - LayerAndLoader spiLayerAndLoader = null; - if 
(bundle.hasSPI()) { - spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); - } - - final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); - final LayerAndLoader pluginLayerAndLoader = createPlugin( - bundle, - pluginParentLoader, - extendedPlugins, - spiLayerAndLoader, - qualifiedExports - ); - final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); - - if (spiLayerAndLoader == null) { - // use full implementation for plugins extending this one - spiLayerAndLoader = pluginLayerAndLoader; - } + PluginBundle pluginBundle = pluginLayer.pluginBundle(); + ClassLoader pluginClassLoader = pluginLayer.pluginClassLoader(); // reload SPI with any new services from the plugin - reloadLuceneSPI(pluginClassLoader); + reloadLuceneSPI(pluginLayer.pluginClassLoader()); ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { // Set context class loader to plugin's class loader so that plugins // that have dependencies with their own SPI endpoints have a chance to load // and initialize them appropriately. - privilegedSetContextClassLoader(pluginClassLoader); + privilegedSetContextClassLoader(pluginLayer.pluginClassLoader()); Plugin plugin; - if (bundle.pluginDescriptor().isStable()) { - stablePluginsRegistry.scanBundleForStablePlugins(bundle, pluginClassLoader); + if (pluginBundle.pluginDescriptor().isStable()) { + stablePluginsRegistry.scanBundleForStablePlugins(pluginBundle, pluginClassLoader); /* Contrary to old plugins we don't need an instance of the plugin here. Stable plugin register components (like CharFilterFactory) in stable plugin registry, which is then used in AnalysisModule @@ -514,16 +409,16 @@ Stable plugin register components (like CharFilterFactory) in stable plugin regi We need to pass a name though so that we can show that a plugin was loaded (via cluster state api) This might need to be revisited once support for settings is added */ - plugin = new StablePluginPlaceHolder(bundle.plugin.getName()); + plugin = new StablePluginPlaceHolder(pluginBundle.plugin.getName()); } else { - Class pluginClass = loadPluginClass(bundle.plugin.getClassname(), pluginClassLoader); + Class pluginClass = loadPluginClass(pluginBundle.plugin.getClassname(), pluginClassLoader); if (pluginClassLoader != pluginClass.getClassLoader()) { throw new IllegalStateException( "Plugin [" + name + "] must reference a class loader local Plugin class [" - + bundle.plugin.getClassname() + + pluginBundle.plugin.getClassname() + "] (class loader [" + pluginClass.getClassLoader() + "])" @@ -531,75 +426,12 @@ We need to pass a name though so that we can show that a plugin was loaded (via } plugin = loadPlugin(pluginClass, settings, configPath); } - loaded.put(name, new LoadedPlugin(bundle.plugin, plugin, spiLayerAndLoader.loader(), spiLayerAndLoader.layer())); + loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin, pluginLayer.pluginClassLoader())); } finally { privilegedSetContextClassLoader(cl); } } - static LayerAndLoader createSPI( - PluginBundle bundle, - ClassLoader parentLoader, - List extendedPlugins, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); - return createSpiModuleLayer( - bundle.spiUrls, - parentLoader, - extendedPlugins.stream().map(LoadedPlugin::layer).toList(), - qualifiedExports - ); - } else { - logger.debug(() 
-> "Loading bundle: " + plugin.getName() + ", creating spi, non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); - } - } - - static LayerAndLoader createPlugin( - PluginBundle bundle, - ClassLoader pluginParentLoader, - List extendedPlugins, - LayerAndLoader spiLayerAndLoader, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); - var parentLayers = Stream.concat( - Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), - extendedPlugins.stream().map(LoadedPlugin::layer) - ).toList(); - return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); - } else if (plugin.isStable()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); - return LayerAndLoader.ofLoader( - UberModuleClassLoader.getInstance( - pluginParentLoader, - ModuleLayer.boot(), - "synthetic." + toModuleName(plugin.getName()), - bundle.allUrls, - Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules - ) - ); - } else { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); - } - } - - // package-visible for testing - static String toModuleName(String name) { - String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots - .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start - .replaceAll("\\.$", "") // trim trailing dot - .toLowerCase(Locale.getDefault()); - assert ModuleSupport.isPackageName(result); - return result; - } - private static void checkDeprecations( PluginIntrospector inspector, List pluginDescriptors, @@ -706,173 +538,6 @@ public final Stream filterPlugins(Class type) { return plugins().stream().filter(x -> type.isAssignableFrom(x.instance().getClass())).map(p -> ((T) p.instance())); } - static LayerAndLoader createPluginModuleLayer( - PluginBundle bundle, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - bundle.plugin.getClassname(), - bundle.plugin.getModuleName().get(), - urlsToPaths(bundle.urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - static final LayerAndLoader createSpiModuleLayer( - Set urls, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - // assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - null, // no entry point - spiModuleName(urls), - urlsToPaths(urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - private static final Module serverModule = PluginsService.class.getModule(); - - static LayerAndLoader createModuleLayer( - String className, - String moduleName, - Path[] paths, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); - var finder = ModuleFinder.of(paths); - - var configuration = Configuration.resolveAndBind( - ModuleFinder.of(), - parentConfigurationOrBoot(parentLayers), - finder, - Set.of(moduleName) - ); - var controller = privilegedDefineModulesWithOneLoader(configuration, 
parentLayersOrBoot(parentLayers), parentLoader); - var pluginModule = controller.layer().findModule(moduleName).get(); - ensureEntryPointAccessible(controller, pluginModule, className); - // export/open upstream modules to this plugin module - exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); - // configure qualified exports/opens to other modules/plugins - addPluginExportsServices(qualifiedExports, controller); - logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); - return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); - } - - private static List parentLayersOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot()); - } else { - return parentLayers; - } - } - - private static List parentConfigurationOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot().configuration()); - } else { - return parentLayers.stream().map(ModuleLayer::configuration).toList(); - } - } - - /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ - private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { - if (className != null) { - controller.addOpens(pluginModule, toPackageName(className), serverModule); - } - } - - protected void addServerExportsService(Map> qualifiedExports) { - final Module serverModule = PluginsService.class.getModule(); - var exportsService = new ModuleQualifiedExportsService(serverModule) { - @Override - protected void addExports(String pkg, Module target) { - serverModule.addExports(pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - serverModule.addOpens(pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, serverModule.getName()); - } - - private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { - for (Module module : controller.layer().modules()) { - var exportsService = new ModuleQualifiedExportsService(module) { - @Override - protected void addExports(String pkg, Module target) { - controller.addExports(module, pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - controller.addOpens(module, pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, module.getName()); - } - } - - /** Determines the module name of the SPI module, given its URL. */ - static String spiModuleName(Set spiURLS) { - ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); - var mrefs = finder.findAll(); - assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; - return mrefs.stream().findFirst().get().descriptor().name(); - } - - /** - * Tuple of module layer and loader. - * Modular Plugins have a plugin specific loader and layer. - * Non-Modular plugins have a plugin specific loader and the boot layer. 
- */ - record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { - - LayerAndLoader { - Objects.requireNonNull(layer); - Objects.requireNonNull(loader); - } - - static LayerAndLoader ofLoader(ClassLoader loader) { - return new LayerAndLoader(ModuleLayer.boot(), loader); - } - } - - @SuppressForbidden(reason = "I need to convert URL's to Paths") - static final Path[] urlsToPaths(Set urls) { - return urls.stream().map(PluginsService::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); - } - - static final URI uncheckedToURI(URL url) { - try { - return url.toURI(); - } catch (URISyntaxException e) { - throw new AssertionError(new IOException(e)); - } - } - - static final String toPackageName(String className) { - assert className.endsWith(".") == false; - int index = className.lastIndexOf('.'); - if (index == -1) { - throw new IllegalStateException("invalid class name:" + className); - } - return className.substring(0, index); - } - @SuppressWarnings("removal") private static void privilegedSetContextClassLoader(ClassLoader loader) { AccessController.doPrivileged((PrivilegedAction) () -> { @@ -880,16 +545,4 @@ private static void privilegedSetContextClassLoader(ClassLoader loader) { return null; }); } - - @SuppressWarnings("removal") - static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) - ); - } - - @SuppressWarnings("removal") - static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { - return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); - } } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java index 44fb531f8610e..155cff57a0ebf 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java @@ -210,12 +210,12 @@ public static void checkForFailedPluginRemovals(final Path pluginsDirectory) thr } /** Get bundles for plugins installed in the given modules directory. */ - static Set getModuleBundles(Path modulesDirectory) throws IOException { + public static Set getModuleBundles(Path modulesDirectory) throws IOException { return findBundles(modulesDirectory, "module"); } /** Get bundles for plugins installed in the given plugins directory. 
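 * (A bundle pairs the parsed plugin descriptor with the jar URLs discovered in the plugin's directory.)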
*/ - static Set getPluginBundles(final Path pluginsDirectory) throws IOException { + public static Set getPluginBundles(final Path pluginsDirectory) throws IOException { return findBundles(pluginsDirectory, "plugin"); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index 3a210199065b7..2cd6e2b11ef7a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -127,7 +127,16 @@ public static Map createAttributesMap( OperationPurpose purpose, String operation ) { - return Map.of("repo_type", repositoryMetadata.type(), "operation", operation, "purpose", purpose.getKey()); + return Map.of( + "repo_type", + repositoryMetadata.type(), + "repo_name", + repositoryMetadata.name(), + "operation", + operation, + "purpose", + purpose.getKey() + ); } } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 2b95fbc69199f..1c4b7cfdab4ef 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -283,12 +283,22 @@ public RegisterRepositoryTask(final RepositoriesService repositoriesService, fin @Override public ClusterState execute(ClusterState currentState) { - RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(request.name(), request.type(), request.settings()); Metadata.Builder mdBuilder = Metadata.builder(currentState.metadata()); RepositoriesMetadata repositories = RepositoriesMetadata.get(currentState); List repositoriesMetadata = new ArrayList<>(repositories.repositories().size() + 1); for (RepositoryMetadata repositoryMetadata : repositories.repositories()) { - if (repositoryMetadata.name().equals(newRepositoryMetadata.name())) { + if (repositoryMetadata.name().equals(request.name())) { + final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata( + request.name(), + // Copy the UUID from the existing instance rather than resetting it back to MISSING_UUID which would force us to + // re-read the RepositoryData to get it again. In principle the new RepositoryMetadata might point to a different + // underlying repository at this point, but if so that'll cause things to fail in clear ways and eventually (before + // writing anything) we'll read the RepositoryData again and update the UUID in the RepositoryMetadata to match. See + // also #109936. 
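+ // (i.e. re-registering a repository with new settings keeps its UUID, and hence its identity, until the next read of RepositoryData proves otherwise)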
+ repositoryMetadata.uuid(), + request.type(), + request.settings() + ); Repository existing = repositoriesService.repositories.get(request.name()); if (existing == null) { existing = repositoriesService.internalRepositories.get(request.name()); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 8c847da344fe5..f1c3d82b74cab 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -205,6 +205,8 @@ private class ShutdownLogger { public static final String STATELESS_SHARD_WRITE_THREAD_NAME = "stateless_shard_write"; public static final String STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME = "stateless_cluster_state"; public static final String STATELESS_SHARD_PREWARMING_THREAD_NAME = "stateless_prewarm"; + public static final String SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME = "searchable_snapshots_cache_fetch_async"; + public static final String SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME = "searchable_snapshots_cache_prewarming"; /** * Prefix for the name of the root {@link RepositoryData} blob. @@ -2188,7 +2190,9 @@ private void assertSnapshotOrStatelessPermittedThreadPool() { STATELESS_TRANSLOG_THREAD_NAME, STATELESS_SHARD_WRITE_THREAD_NAME, STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME, - STATELESS_SHARD_PREWARMING_THREAD_NAME + STATELESS_SHARD_PREWARMING_THREAD_NAME, + SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME, + SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME ); } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index ae9ae6f8b5bf9..5f907572641a6 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -22,14 +22,27 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; import org.elasticsearch.env.Environment; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorImpact; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.BufferedInputStream; import java.io.IOException; import java.nio.file.Files; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.health.ImpactArea.DEPLOYMENT_MANAGEMENT; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; @@ -53,6 +66,7 @@ public class 
FileSettingsService extends MasterNodeFileWatchingService implement public static final String NAMESPACE = "file_settings"; public static final String OPERATOR_DIRECTORY = "operator"; private final ReservedClusterStateService stateService; + private final FileSettingsHealthIndicatorService healthIndicatorService; /** * Constructs the {@link FileSettingsService} @@ -60,10 +74,21 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement * @param clusterService so we can register ourselves as a cluster state change listener * @param stateService an instance of the immutable cluster state controller, so we can perform the cluster state changes * @param environment we need the environment to pull the location of the config and operator directories + * @param healthIndicatorService tracks the success or failure of file-based settings */ - public FileSettingsService(ClusterService clusterService, ReservedClusterStateService stateService, Environment environment) { + public FileSettingsService( + ClusterService clusterService, + ReservedClusterStateService stateService, + Environment environment, + FileSettingsHealthIndicatorService healthIndicatorService + ) { super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; + this.healthIndicatorService = healthIndicatorService; + } + + public FileSettingsHealthIndicatorService healthIndicatorService() { + return healthIndicatorService; } /** @@ -121,6 +146,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_VERSION_ONLY); } @@ -131,6 +157,7 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept @Override protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_OR_SAME_VERSION); } @@ -146,6 +173,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I completion.get(); } + private void completeProcessing(Exception e, PlainActionFuture<Void> completion) { + if (e != null) { + healthIndicatorService.failureOccurred(e.toString()); + completion.onFailure(e); + } else { + completion.onResponse(null); + healthIndicatorService.successOccurred(); + } + } + @Override protected void onProcessFileChangesException(Exception e) { if (e instanceof ExecutionException) { @@ -172,11 +209,61 @@ protected void processInitialFileMissing() throws ExecutionException, Interrupte completion.get(); } - private static void completeProcessing(Exception e, PlainActionFuture<Void> completion) { - if (e != null) { - completion.onFailure(e); - } else { - completion.onResponse(null); + public static class FileSettingsHealthIndicatorService implements HealthIndicatorService { + static final String NAME = "file_settings"; + static final String NO_CHANGES_SYMPTOM = "No file-based setting changes have occurred"; + static final String SUCCESS_SYMPTOM = "The most recent file-based settings were applied successfully"; + static final String FAILURE_SYMPTOM = "The most recent file-based settings encountered an error"; + + static final List<HealthIndicatorImpact>
STALE_SETTINGS_IMPACT = List.of( + new HealthIndicatorImpact( + NAME, + "stale", + 3, + "The most recent file-based settings changes have not been applied.", + List.of(DEPLOYMENT_MANAGEMENT) + ) + ); + + private final AtomicLong changeCount = new AtomicLong(0); + private final AtomicLong failureStreak = new AtomicLong(0); + private final AtomicReference mostRecentFailure = new AtomicReference<>(); + + public void changeOccurred() { + changeCount.incrementAndGet(); + } + + public void successOccurred() { + failureStreak.set(0); + } + + public void failureOccurred(String description) { + failureStreak.incrementAndGet(); + mostRecentFailure.set(description); + } + + @Override + public String name() { + return NAME; + } + + @Override + public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + if (0 == changeCount.get()) { + return createIndicator(GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } + long numFailures = failureStreak.get(); + if (0 == numFailures) { + return createIndicator(GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } else { + return createIndicator( + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", numFailures, "most_recent_failure", mostRecentFailure.get())), + STALE_SETTINGS_IMPACT, + List.of() + ); + } } } } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index f1b59ed14cefb..4564a37dacf4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -122,6 +122,7 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl ); } + usageCount.increment(); if (request.isStreamedContent()) { assert action instanceof RequestBodyChunkConsumer; var chunkConsumer = (RequestBodyChunkConsumer) action; @@ -137,11 +138,11 @@ public void close() { chunkConsumer.streamClose(); } }); + action.accept(channel); + } else { + action.accept(channel); + request.getHttpRequest().release(); } - - usageCount.increment(); - // execute the action - action.accept(channel); } } diff --git a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java index cb5155cb0de0b..21a44ac9af5c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java @@ -43,11 +43,6 @@ public boolean canTripCircuitBreaker() { return delegate.canTripCircuitBreaker(); } - @Override - public boolean allowsUnsafeBuffers() { - return delegate.allowsUnsafeBuffers(); - } - @Override public boolean supportsBulkContent() { return delegate.supportsBulkContent(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 7446ec5bb6717..49fe794bbe615 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -432,10 +432,6 @@ private void dispatchRequest( } // iff we could reserve bytes for the request we need to send the response also over this channel responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, methodHandlers); - // TODO: Count requests double in the circuit breaker if they 
need copying? - if (handler.allowsUnsafeBuffers() == false) { - request.ensureSafeBuffers(); - } if (handler.allowSystemIndexAccessByDefault() == false) { // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product and diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 8d546f7aa43f8..e72b30526c8e3 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -9,13 +9,10 @@ package org.elasticsearch.rest; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; -import java.util.Map; import java.util.Set; import static org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter.UNIFIED_HIGHLIGHTER_MATCHED_FIELDS; @@ -29,9 +26,4 @@ public Set getFeatures() { UNIFIED_HIGHLIGHTER_MATCHED_FIELDS ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); - } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index cf66e402d3691..572e92e369a63 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -69,18 +69,6 @@ default Scope getServerlessScope() { return serverlessScope == null ? null : serverlessScope.value(); } - /** - * Indicates if the RestHandler supports working with pooled buffers. If the request handler will not escape the return - * {@link RestRequest#content()} or any buffers extracted from it then there is no need to make a copies of any pooled buffers in the - * {@link RestRequest} instance before passing a request to this handler. If this instance does not support pooled/unsafe buffers - * {@link RestRequest#ensureSafeBuffers()} should be called on any request before passing it to {@link #handleRequest}. - * - * @return true iff the handler supports requests that make use of pooled buffers - */ - default boolean allowsUnsafeBuffers() { - return false; - } - /** * The list of {@link Route}s that this RestHandler is responsible for handling. 
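 * Used by the {@link RestController} to dispatch each incoming request to the handler whose route matches.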
*/ diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 17eda305b5ccf..17d85a8eabb1c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -16,17 +16,21 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpBody; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; @@ -51,6 +55,8 @@ public class RestRequest implements ToXContent.Params, Traceable { + private static final Logger logger = LogManager.getLogger(RestRequest.class); + /** * Internal marker request parameter to indicate that a request was made in serverless mode. Use this parameter, together with * {@link #OPERATOR_REQUEST} if you need to toggle behavior for serverless, for example to enforce partial API restrictions @@ -187,15 +193,6 @@ protected RestRequest(RestRequest other) { } } - /** - * Invoke {@link HttpRequest#releaseAndCopy()} on the http request in this instance and replace a pooled http request - * with an unpooled copy. This is supposed to be used before passing requests to {@link RestHandler} instances that can not safely - * handle http requests that use pooled buffers as determined by {@link RestHandler#allowsUnsafeBuffers()}. - */ - void ensureSafeBuffers() { - httpRequest = httpRequest.releaseAndCopy(); - } - /** * Creates a new REST request. * @@ -306,9 +303,31 @@ public boolean isFullContent() { return httpRequest.body().isFull(); } + /** + * Returns a copy of HTTP content. The copy is GC-managed and does not require reference counting. + * Please use {@link #releasableContent()} to avoid content copy. + */ + @SuppressForbidden(reason = "temporarily support content copy while migrating RestHandlers to ref counted pooled buffers") public BytesReference content() { + return BytesReference.copyBytes(releasableContent()); + } + + /** + * Returns a direct reference to the network buffer containing the request body. The HTTP layers will release their references to this + * buffer as soon as they have finished the synchronous steps of processing the request on the network thread, which will by default + * release the buffer back to the pool where it may be re-used for another request. If you need to keep the buffer alive past the end of + * these synchronous steps, acquire your own reference to this buffer and release it once it's no longer needed. 
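+ * <p>A minimal sketch of that pattern, following the handlers updated in this change (names are illustrative):
+ * <pre>{@code
+ * ReleasableBytesReference body = request.releasableContent();
+ * body.mustIncRef(); // keep the buffer alive beyond the synchronous steps
+ * client.execute(action, actionRequest, ActionListener.releaseAfter(listener, body)); // released on completion
+ * }</pre>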
+ */ + public ReleasableBytesReference releasableContent() { this.contentConsumed = true; - return httpRequest.body().asFull().bytes(); + var bytes = httpRequest.body().asFull().bytes(); + if (bytes.hasReferences() == false) { + var e = new IllegalStateException("http releasable content accessed after release"); + logger.error(e.getMessage(), e); + assert false : e; + throw e; + } + return bytes; } public boolean isStreamedContent() { @@ -319,18 +338,32 @@ public HttpBody.Stream contentStream() { return httpRequest.body().asStream(); } - /** - * @return content of the request body or throw an exception if the body or content type is missing - */ - public final BytesReference requiredContent() { + private void ensureContent() { if (hasContent() == false) { throw new ElasticsearchParseException("request body is required"); } else if (xContentType.get() == null) { throwValidationException("unknown content type"); } + } + + /** + * @return copy of the request body or throw an exception if the body or content type is missing. + * See {@link #content()}. Please use {@link #requiredReleasableContent()} to avoid content copy. + */ + public final BytesReference requiredContent() { + ensureContent(); return content(); } + /** + * Returns reference to the network buffer of HTTP content or throw an exception if the body or content type is missing. + * See {@link #releasableContent()}. It's a recommended method to handle HTTP content without copying it. + */ + public ReleasableBytesReference requiredReleasableContent() { + ensureContent(); + return releasableContent(); + } + private static void throwValidationException(String msg) { ValidationException unknownContentType = new ValidationException(); unknownContentType.addValidationError(msg); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java index e4105363e1bce..57b4d2990c8e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Tuple; @@ -44,10 +45,10 @@ public boolean hasContent() { } @Override - public BytesReference content() { + public ReleasableBytesReference releasableContent() { if (filteredBytes == null) { Tuple> result = XContentHelper.convertToMap( - restRequest.requiredContent(), + restRequest.requiredReleasableContent(), true, restRequest.getXContentType() ); @@ -63,7 +64,7 @@ public BytesReference content() { throw new ElasticsearchException("failed to parse request", e); } } - return filteredBytes; + return ReleasableBytesReference.wrap(filteredBytes); } }; } else { diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index 29cae343fb09e..d043974055667 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.RestApiVersion; 
import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -146,12 +147,12 @@ public RestResponse(RestChannel channel, RestStatus status, Exception e) throws params = new ToXContent.DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false"), params); } - if (channel.detailedErrorsEnabled() == false) { + if (channel.request().getRestApiVersion() == RestApiVersion.V_8 && channel.detailedErrorsEnabled() == false) { deprecationLogger.warn( DeprecationCategory.API, "http_detailed_errors", - "The JSON format of non-detailed errors will change in Elasticsearch 9.0 to match the JSON structure" - + " used for detailed errors. To keep using the existing format, use the V8 REST API." + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure" + + " used for detailed errors." ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index 946931e166363..ca9e4abcaeec7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -11,13 +11,11 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Set; -import java.util.function.Predicate; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @@ -35,23 +32,14 @@ @ServerlessScope(Scope.INTERNAL) public class RestClusterGetSettingsAction extends BaseRestHandler { - public static final NodeFeature SUPPORTS_GET_SETTINGS_ACTION = new NodeFeature("rest.get_settings_action"); - private final Settings settings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; - private final Predicate clusterSupportsFeature; - public RestClusterGetSettingsAction( - Settings settings, - ClusterSettings clusterSettings, - SettingsFilter settingsFilter, - Predicate clusterSupportsFeature - ) { + public RestClusterGetSettingsAction(Settings settings, ClusterSettings clusterSettings, SettingsFilter settingsFilter) { this.settings = settings; this.clusterSettings = clusterSettings; this.settingsFilter = settingsFilter; - this.clusterSupportsFeature = clusterSupportsFeature; } @Override @@ -72,10 +60,6 @@ private static void setUpRequestParams(MasterNodeReadRequest clusterRequest, public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); - if 
(clusterSupportsFeature.test(SUPPORTS_GET_SETTINGS_ACTION) == false) { - return prepareLegacyRequest(request, client, renderDefaults); - } - ClusterGetSettingsAction.Request clusterSettingsRequest = new ClusterGetSettingsAction.Request(getMasterNodeTimeout(request)); setUpRequestParams(clusterSettingsRequest, request); @@ -89,29 +73,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC ); } - private RestChannelConsumer prepareLegacyRequest(final RestRequest request, final NodeClient client, final boolean renderDefaults) { - ClusterStateRequest clusterStateRequest = new ClusterStateRequest(getMasterNodeTimeout(request)).routingTable(false).nodes(false); - setUpRequestParams(clusterStateRequest, request); - return channel -> client.admin() - .cluster() - .state( - clusterStateRequest, - new RestToXContentListener(channel).map( - r -> response( - new ClusterGetSettingsAction.Response( - r.getState().metadata().persistentSettings(), - r.getState().metadata().transientSettings(), - r.getState().metadata().settings() - ), - renderDefaults, - settingsFilter, - clusterSettings, - settings - ) - ) - ); - } - @Override protected Set responseParams() { return Settings.FORMAT_PARAMS; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index de3fd390ec86d..9428ef5390b2f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestParser; @@ -102,9 +103,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); + ReleasableBytesReference content = request.requiredReleasableContent(); + try { bulkRequest.add( - request.requiredContent(), + content, defaultIndex, defaultRouting, defaultFetchSourceContext, @@ -119,8 +122,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } catch (Exception e) { return channel -> new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); } - - return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); + return channel -> { + content.mustIncRef(); + client.bulk(bulkRequest, ActionListener.releaseAfter(new RestRefCountedChunkedToXContentListener<>(channel), content)); + }; } else { String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); @@ -270,11 +275,6 @@ public boolean supportsBulkContent() { return true; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public Set supportedCapabilities() { return capabilities; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 
c2437dcb96fa6..d81ac03492d59 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -9,12 +9,14 @@ package org.elasticsearch.rest.action.document; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -104,11 +106,12 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + ReleasableBytesReference source = request.requiredReleasableContent(); IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.setPipeline(request.param("pipeline")); - indexRequest.source(request.requiredContent(), request.getXContentType()); + indexRequest.source(source, request.getXContentType()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.setRefreshPolicy(request.param("refresh")); indexRequest.version(RestActions.parseVersion(request)); @@ -126,10 +129,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indexRequest.opType(sOpType); } - return channel -> client.index( - indexRequest, - new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) - ); + return channel -> { + source.mustIncRef(); + client.index( + indexRequest, + ActionListener.releaseAfter( + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())), + source + ) + ); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 80a85d3b9b748..ff062084a3cbb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -423,8 +423,4 @@ protected Set responseParams() { return RESPONSE_PARAMS; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 7b57481ad5716..e5c4826bfce97 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -40,6 +40,10 @@ private SearchCapabilities() {} private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support"; /** Support multi-dense-vector script field access. 
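 * (Advertised only when the multi-dense-vector feature flag is enabled.)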
*/ private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access"; + /** Initial support for multi-dense-vector maxSim functions access. */ + private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim"; + + private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; public static final Set CAPABILITIES; static { @@ -50,9 +54,11 @@ private SearchCapabilities() {} capabilities.add(DENSE_VECTOR_DOCVALUE_FIELDS); capabilities.add(TRANSFORM_RANK_RRF_TO_RETRIEVER); capabilities.add(NESTED_RETRIEVER_INNER_HITS_SUPPORT); + capabilities.add(RANDOM_SAMPLER_WITH_SCORED_SUBAGGS); if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) { capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER); capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS); + capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM); } if (Build.current().isSnapshot()) { capabilities.add(KQL_QUERY_SUPPORTED); diff --git a/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java new file mode 100644 index 0000000000000..136c5e7b57d4b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java @@ -0,0 +1,372 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.script.field.vectors.DenseVector; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; + +import java.io.IOException; +import java.util.HexFormat; +import java.util.List; + +public class MultiVectorScoreScriptUtils { + + public static class MultiDenseVectorFunction { + protected final ScoreScript scoreScript; + protected final MultiDenseVectorDocValuesField field; + + public MultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field) { + this.scoreScript = scoreScript; + this.field = field; + } + + void setNextVector() { + try { + field.setNextDocId(scoreScript._getDocId()); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } + if (field.isEmpty()) { + throw new IllegalArgumentException("A document doesn't have a value for a multi-vector field!"); + } + } + } + + public static class ByteMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final byte[][] queryVector; + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. 
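+ * Each inner list is one vector; all inner vectors must have the same number of dimensions.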
+         */
+        public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List<List<Number>> queryVector) {
+            super(scoreScript, field);
+            if (queryVector.isEmpty()) {
+                throw new IllegalArgumentException("The query vector is empty.");
+            }
+            field.getElementType().checkDimensions(field.get().getDims(), queryVector.get(0).size());
+            this.queryVector = new byte[queryVector.size()][queryVector.get(0).size()];
+            float[] validateValues = new float[queryVector.size()];
+            int lastSize = -1;
+            for (int i = 0; i < queryVector.size(); i++) {
+                if (lastSize != -1 && lastSize != queryVector.get(i).size()) {
+                    throw new IllegalArgumentException(
+                        "The query vector contains inner vectors which have inconsistent number of dimensions."
+                    );
+                }
+                lastSize = queryVector.get(i).size();
+                for (int j = 0; j < queryVector.get(i).size(); j++) {
+                    final Number number = queryVector.get(i).get(j);
+                    byte value = number.byteValue();
+                    this.queryVector[i][j] = value;
+                    validateValues[i] = number.floatValue();
+                }
+                field.getElementType().checkVectorBounds(validateValues);
+            }
+        }
+
+        /**
+         * Constructs a dense vector function used for byte-sized vectors.
+         *
+         * @param scoreScript The script in which this function was referenced.
+         * @param field The vector field.
+         * @param queryVector The query vector.
+         */
+        public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) {
+            super(scoreScript, field);
+            this.queryVector = queryVector;
+        }
+    }
+
+    public static class FloatMultiDenseVectorFunction extends MultiDenseVectorFunction {
+        protected final float[][] queryVector;
+
+        /**
+         * Constructs a dense vector function used for float vectors.
+         *
+         * @param scoreScript The script in which this function was referenced.
+         * @param field The vector field.
+         * @param queryVector The query vector.
+         */
+        public FloatMultiDenseVectorFunction(
+            ScoreScript scoreScript,
+            MultiDenseVectorDocValuesField field,
+            List<List<Number>> queryVector
+        ) {
+            super(scoreScript, field);
+            if (queryVector.isEmpty()) {
+                throw new IllegalArgumentException("The query vector is empty.");
+            }
+            DenseVector.checkDimensions(field.get().getDims(), queryVector.get(0).size());
+
+            this.queryVector = new float[queryVector.size()][queryVector.get(0).size()];
+            int lastSize = -1;
+            for (int i = 0; i < queryVector.size(); i++) {
+                if (lastSize != -1 && lastSize != queryVector.get(i).size()) {
+                    throw new IllegalArgumentException(
+                        "The query vector contains inner vectors which have inconsistent number of dimensions."
+                    );
+                }
+                lastSize = queryVector.get(i).size();
+                for (int j = 0; j < queryVector.get(i).size(); j++) {
+                    this.queryVector[i][j] = queryVector.get(i).get(j).floatValue();
+                }
+                field.getElementType().checkVectorBounds(this.queryVector[i]);
+            }
+        }
+    }
+
+    // Calculate Hamming distances between a query's dense vector and documents' dense vectors
+    public interface MaxSimInvHammingDistanceInterface {
+        float maxSimInvHamming();
+    }
+
+    public static class ByteMaxSimInvHammingDistance extends ByteMultiDenseVectorFunction implements MaxSimInvHammingDistanceInterface {
+
+        public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List<List<Number>> queryVector) {
+            super(scoreScript, field, queryVector);
+        }
+
+        public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) {
+            super(scoreScript, field, queryVector);
+        }
+
+        public float maxSimInvHamming() {
+            setNextVector();
+            return field.get().maxSimInvHamming(queryVector);
+        }
+    }
+
+    private record BytesOrList(byte[][] bytes, List<List<Number>> list) {}
+
+    @SuppressWarnings("unchecked")
+    private static BytesOrList parseBytes(Object queryVector) {
+        if (queryVector instanceof List) {
+            // check if it's a list of strings or a list of lists
+            if (((List<?>) queryVector).get(0) instanceof List) {
+                return new BytesOrList(null, ((List<List<Number>>) queryVector));
+            } else if (((List<?>) queryVector).get(0) instanceof String) {
+                byte[][] parsedQueryVector = new byte[((List<?>) queryVector).size()][];
+                int lastSize = -1;
+                for (int i = 0; i < ((List<?>) queryVector).size(); i++) {
+                    parsedQueryVector[i] = HexFormat.of().parseHex((String) ((List<?>) queryVector).get(i));
+                    if (lastSize != -1 && lastSize != parsedQueryVector[i].length) {
+                        throw new IllegalArgumentException(
+                            "The query vector contains inner vectors which have inconsistent number of dimensions."
+                        );
+                    }
+                    lastSize = parsedQueryVector[i].length;
+                }
+                return new BytesOrList(parsedQueryVector, null);
+            } else {
+                throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+            }
+        } else {
+            throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+        }
+    }
+
+    public static final class MaxSimInvHamming {
+
+        private final MaxSimInvHammingDistanceInterface function;
+
+        public MaxSimInvHamming(ScoreScript scoreScript, Object queryVector, String fieldName) {
+            MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName);
+            if (field.getElementType() == DenseVectorFieldMapper.ElementType.FLOAT) {
+                throw new IllegalArgumentException("hamming distance is only supported for byte or bit vectors");
+            }
+            BytesOrList bytesOrList = parseBytes(queryVector);
+            if (bytesOrList.bytes != null) {
+                this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.bytes);
+            } else {
+                this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.list);
+            }
+        }
+
+        public double maxSimInvHamming() {
+            return function.maxSimInvHamming();
+        }
+    }
+
+    // Calculate a dot product between a query's dense vector and documents' dense vectors
+    public interface MaxSimDotProductInterface {
+        double maxSimDotProduct();
+    }
+
+    public static class MaxSimBitDotProduct extends MultiDenseVectorFunction implements MaxSimDotProductInterface {
+        private final byte[][] byteQueryVector;
+        private final float[][] floatQueryVector;
+
+        public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) {
+            super(scoreScript, field);
+            if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) {
+                throw new IllegalArgumentException("Cannot calculate bit dot product for non-bit vectors");
+            }
+            int fieldDims = field.get().getDims();
+            if (fieldDims != queryVector.length * Byte.SIZE && fieldDims != queryVector.length) {
+                throw new IllegalArgumentException(
+                    "The query vector has an incorrect number of dimensions. Must be ["
+                        + fieldDims / 8
+                        + "] for bitwise operations, or ["
+                        + fieldDims
+                        + "] for byte wise operations: provided ["
+                        + queryVector.length
+                        + "]."
+                );
+            }
+            this.byteQueryVector = queryVector;
+            this.floatQueryVector = null;
+        }
+
+        public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List<List<Number>> queryVector) {
+            super(scoreScript, field);
+            if (queryVector.isEmpty()) {
+                throw new IllegalArgumentException("The query vector is empty.");
+            }
+            if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) {
+                throw new IllegalArgumentException("cannot calculate bit dot product for non-bit vectors");
+            }
+            float[][] floatQueryVector = new float[queryVector.size()][];
+            byte[][] byteQueryVector = new byte[queryVector.size()][];
+            boolean isFloat = false;
+            int lastSize = -1;
+            for (int i = 0; i < queryVector.size(); i++) {
+                if (lastSize != -1 && lastSize != queryVector.get(i).size()) {
+                    throw new IllegalArgumentException(
+                        "The query vector contains inner vectors which have inconsistent number of dimensions."
+                    );
+                }
+                lastSize = queryVector.get(i).size();
+                floatQueryVector[i] = new float[queryVector.get(i).size()];
+                if (isFloat == false) {
+                    byteQueryVector[i] = new byte[queryVector.get(i).size()];
+                }
+                for (int j = 0; j < queryVector.get(i).size(); j++) {
+                    Number number = queryVector.get(i).get(j);
+                    floatQueryVector[i][j] = number.floatValue();
+                    if (isFloat == false) {
+                        byteQueryVector[i][j] = number.byteValue();
+                    }
+                    if (isFloat
+                        || floatQueryVector[i][j] % 1.0f != 0.0f
+                        || floatQueryVector[i][j] < Byte.MIN_VALUE
+                        || floatQueryVector[i][j] > Byte.MAX_VALUE) {
+                        isFloat = true;
+                    }
+                }
+            }
+            int fieldDims = field.get().getDims();
+            if (isFloat) {
+                this.floatQueryVector = floatQueryVector;
+                this.byteQueryVector = null;
+                if (fieldDims != floatQueryVector[0].length) {
+                    throw new IllegalArgumentException(
+                        "The query vector contains inner vectors which have incorrect number of dimensions. Must be ["
+                            + fieldDims
+                            + "] for float wise operations: provided ["
+                            + floatQueryVector[0].length
+                            + "]."
+                    );
+                }
+            } else {
+                this.floatQueryVector = null;
+                this.byteQueryVector = byteQueryVector;
+                if (fieldDims != byteQueryVector[0].length * Byte.SIZE && fieldDims != byteQueryVector[0].length) {
+                    throw new IllegalArgumentException(
+                        "The query vector contains inner vectors which have incorrect number of dimensions. Must be ["
+                            + fieldDims / 8
+                            + "] for bitwise operations, or ["
+                            + fieldDims
+                            + "] for byte wise operations: provided ["
+                            + byteQueryVector[0].length
+                            + "]."
+                    );
+                }
+            }
+        }
+
+        @Override
+        public double maxSimDotProduct() {
+            setNextVector();
+            return byteQueryVector != null ? field.get().maxSimDotProduct(byteQueryVector) : field.get().maxSimDotProduct(floatQueryVector);
+        }
+    }
+
+    public static class MaxSimByteDotProduct extends ByteMultiDenseVectorFunction implements MaxSimDotProductInterface {
+
+        public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List<List<Number>> queryVector) {
+            super(scoreScript, field, queryVector);
+        }
+
+        public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) {
+            super(scoreScript, field, queryVector);
+        }
+
+        public double maxSimDotProduct() {
+            setNextVector();
+            return field.get().maxSimDotProduct(queryVector);
+        }
+    }
+
+    public static class MaxSimFloatDotProduct extends FloatMultiDenseVectorFunction implements MaxSimDotProductInterface {
+
+        public MaxSimFloatDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List<List<Number>> queryVector) {
+            super(scoreScript, field, queryVector);
+        }
+
+        public double maxSimDotProduct() {
+            setNextVector();
+            return field.get().maxSimDotProduct(queryVector);
+        }
+    }
+
+    public static final class MaxSimDotProduct {
+
+        private final MaxSimDotProductInterface function;
+
+        @SuppressWarnings("unchecked")
+        public MaxSimDotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) {
+            MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName);
+            function = switch (field.getElementType()) {
+                case BIT -> {
+                    BytesOrList bytesOrList = parseBytes(queryVector);
+                    if (bytesOrList.bytes != null) {
+                        yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.bytes);
+                    } else {
+                        yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.list);
+                    }
+                }
+                case BYTE -> {
+                    BytesOrList bytesOrList = parseBytes(queryVector);
+                    if (bytesOrList.bytes != null) {
+                        yield new MaxSimByteDotProduct(scoreScript, field, bytesOrList.bytes);
+                    } else {
+                        yield new MaxSimByteDotProduct(scoreScript, field, bytesOrList.list);
+                    }
+                }
+                case FLOAT -> {
+                    if (queryVector instanceof List) {
+                        yield new MaxSimFloatDotProduct(scoreScript, field, (List<List<Number>>) queryVector);
+                    }
+                    throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
+                }
+            };
+        }
+
+        public double maxSimDotProduct() {
+            return function.maxSimDotProduct();
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java
index 24e19a803ff38..7805816090d51 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java
@@ -10,11 +10,13 @@
 package org.elasticsearch.script.field.vectors;

 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.simdvec.ESVectorUtil;

-import java.util.Iterator;
+import java.util.Arrays;

 public class BitMultiDenseVector extends ByteMultiDenseVector {
-    public BitMultiDenseVector(Iterator<byte[]> vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) {
+    public BitMultiDenseVector(VectorIterator<byte[]> vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) {
         super(vectorValues, magnitudesBytes, numVecs, dims);
     }

@@ -31,6 +33,70 @@ public void checkDimensions(int qvDims) {
         }
     }

+    @Override
+    public float maxSimDotProduct(float[][] query) {
+        vectorValues.reset();
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        while (vectorValues.hasNext()) {
+            byte[] vv = vectorValues.next();
+            for (int i = 0; i < query.length; i++) {
+                maxes[i] = Math.max(maxes[i], ESVectorUtil.ipFloatBit(query[i], vv));
+            }
+        }
+        float sums = 0;
+        for (float m : maxes) {
+            sums += m;
+        }
+        return sums;
+    }
+
+    @Override
+    public float maxSimDotProduct(byte[][] query) {
+        vectorValues.reset();
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        if (query[0].length == dims) {
+            while (vectorValues.hasNext()) {
+                byte[] vv = vectorValues.next();
+                for (int i = 0; i < query.length; i++) {
+                    maxes[i] = Math.max(maxes[i], ESVectorUtil.andBitCount(query[i], vv));
+                }
+            }
+        } else {
+            while (vectorValues.hasNext()) {
+                byte[] vv = vectorValues.next();
+                for (int i = 0; i < query.length; i++) {
+                    maxes[i] = Math.max(maxes[i], ESVectorUtil.ipByteBit(query[i], vv));
+                }
+            }
+        }
+        float sum = 0;
+        for (float m : maxes) {
+            sum += m;
+        }
+        return sum;
+    }
+
+    @Override
+    public float maxSimInvHamming(byte[][] query) {
+        vectorValues.reset();
+        int bitCount = this.getDims();
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        while (vectorValues.hasNext()) {
+            byte[] vv = vectorValues.next();
+            for (int i = 0; i < query.length; i++) {
+                maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount));
+            }
+        }
+        float sum = 0;
+        for (float m : maxes) {
+            sum += m;
+        }
+        return sum;
+    }
+
     @Override
     public int getDims() {
         return dims * Byte.SIZE;
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java
index e610d10146b2f..5e9d3e05746c8 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java
@@ -10,21 +10,22 @@
 package org.elasticsearch.script.field.vectors;

 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.VectorUtil;
 import org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder;

+import java.util.Arrays;
 import java.util.Iterator;

 public class ByteMultiDenseVector implements MultiDenseVector {

-    protected final Iterator<byte[]> vectorValues;
+    protected final VectorIterator<byte[]> vectorValues;
     protected final int numVecs;
     protected final int dims;

-    private Iterator<float[]> floatDocVectors;
     private float[] magnitudes;
     private final BytesRef magnitudesBytes;

-    public ByteMultiDenseVector(Iterator<byte[]> vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) {
+    public ByteMultiDenseVector(VectorIterator<byte[]> vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) {
         assert magnitudesBytes.length == numVecs * Float.BYTES;
         this.vectorValues = vectorValues;
         this.numVecs = numVecs;
@@ -33,11 +34,50 @@ public ByteMultiDenseVector(Iterator<byte[]> vectorValues, BytesRef magnitudesBy
     }

     @Override
-    public Iterator<float[]> getVectors() {
-        if (floatDocVectors == null) {
-            floatDocVectors = new ByteToFloatIteratorWrapper(vectorValues, dims);
+    public float maxSimDotProduct(float[][] query) {
+        throw new UnsupportedOperationException("use [float maxSimDotProduct(byte[][] queryVector)] instead");
+    }
+
+    @Override
+    public float maxSimDotProduct(byte[][] query) {
+        vectorValues.reset();
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        while (vectorValues.hasNext()) {
+            byte[] vv = vectorValues.next();
+            for (int i = 0; i < query.length; i++) {
+                maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv));
+            }
+        }
+        float sum = 0;
+        for (float m : maxes) {
+            sum += m;
+        }
+        return sum;
+    }
+
+    @Override
+    public float maxSimInvHamming(byte[][] query) {
+        vectorValues.reset();
+        int bitCount = dims * Byte.SIZE;
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        while (vectorValues.hasNext()) {
+            byte[] vv = vectorValues.next();
+            for (int i = 0; i < query.length; i++) {
+                maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount));
+            }
+        }
+        float sum = 0;
+        for (float m : maxes) {
+            sum += m;
         }
-        return floatDocVectors;
+        return sum;
+    }
+
+    @Override
+    public Iterator<float[]> getVectors() {
+        return new ByteToFloatIteratorWrapper(vectorValues.copy(), dims);
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java
index d1e062e0a3dee..d45c5b85137f5 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java
@@ -23,7 +23,7 @@ public class ByteMultiDenseVectorDocValuesField extends MultiDenseVectorDocValue
     private final BinaryDocValues magnitudes;
     protected final int dims;
     protected int numVecs;
-    protected Iterator<byte[]> vectorValue;
+    protected VectorIterator<byte[]> vectorValue;
     protected boolean decoded;
     protected BytesRef value;
     protected BytesRef magnitudesValue;
@@ -111,7 +111,7 @@ public boolean isEmpty() {
         return value == null;
     }

-    static class ByteVectorIterator implements Iterator<byte[]> {
+    static class ByteVectorIterator implements VectorIterator<byte[]> {
         private final byte[] buffer;
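         // Editorial note: next() refills and returns this same scratch buffer on every call, so callers
         // must copy any vector they want to keep. The maxSim loops above rely on reset() to rescan a
         // document's vectors once per scoring call, roughly:
         //     it.reset();
         //     while (it.hasNext()) { byte[] vv = it.next(); /* fold vv into per-query maxes */ }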
         private final BytesRef vectorValues;
         private final int size;
@@ -138,5 +138,15 @@ public byte[] next() {
             idx++;
             return buffer;
         }
+
+        @Override
+        public Iterator<byte[]> copy() {
+            return new ByteVectorIterator(vectorValues, new byte[buffer.length], size);
+        }
+
+        @Override
+        public void reset() {
+            idx = 0;
+        }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java
index 9ffe8b3b970c4..9c2f7eb6a86d4 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java
@@ -10,7 +10,9 @@
 package org.elasticsearch.script.field.vectors;

 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.VectorUtil;

+import java.util.Arrays;
 import java.util.Iterator;

 import static org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder.getMultiMagnitudes;
@@ -21,19 +23,47 @@ public class FloatMultiDenseVector implements MultiDenseVector {
     private float[] magnitudesArray = null;
     private final int dims;
     private final int numVectors;
-    private final Iterator<float[]> decodedDocVector;
+    private final VectorIterator<float[]> vectorValues;

-    public FloatMultiDenseVector(Iterator<float[]> decodedDocVector, BytesRef magnitudes, int numVectors, int dims) {
+    public FloatMultiDenseVector(VectorIterator<float[]> decodedDocVector, BytesRef magnitudes, int numVectors, int dims) {
         assert magnitudes.length == numVectors * Float.BYTES;
-        this.decodedDocVector = decodedDocVector;
+        this.vectorValues = decodedDocVector;
         this.magnitudes = magnitudes;
         this.numVectors = numVectors;
         this.dims = dims;
     }

+    @Override
+    public float maxSimDotProduct(float[][] query) {
+        vectorValues.reset();
+        float[] maxes = new float[query.length];
+        Arrays.fill(maxes, Float.NEGATIVE_INFINITY);
+        while (vectorValues.hasNext()) {
+            float[] vv = vectorValues.next();
+            for (int i = 0; i < query.length; i++) {
+                maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv));
+            }
+        }
+        float sum = 0;
+        for (float m : maxes) {
+            sum += m;
+        }
+        return sum;
+    }
+
+    @Override
+    public float maxSimDotProduct(byte[][] query) {
+        throw new UnsupportedOperationException("use [float maxSimDotProduct(float[][] queryVector)] instead");
+    }
+
+    @Override
+    public float maxSimInvHamming(byte[][] query) {
+        throw new UnsupportedOperationException("hamming distance is not supported for float vectors");
+    }
+
     @Override
     public Iterator<float[]> getVectors() {
-        return decodedDocVector;
+        return vectorValues.copy();
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java
index 356db58d989c5..c7ac7842afd96 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java
@@ -110,14 +110,16 @@ private void decodeVectorIfNecessary() {
         }
     }

-    static class FloatVectorIterator implements Iterator<float[]> {
+    static class FloatVectorIterator implements VectorIterator<float[]> {
         private final float[] buffer;
         private final FloatBuffer vectorValues;
+        private final BytesRef vectorValueBytesRef;
         private final int size;
         private int idx = 0;

         FloatVectorIterator(BytesRef vectorValues, float[] buffer, int size) {
             assert vectorValues.length == (buffer.length * Float.BYTES * size);
+            this.vectorValueBytesRef = vectorValues;
             this.vectorValues = ByteBuffer.wrap(vectorValues.bytes, vectorValues.offset, vectorValues.length)
                 .order(ByteOrder.LITTLE_ENDIAN)
                 .asFloatBuffer();
@@ -139,5 +141,16 @@ public float[] next() {
             idx++;
             return buffer;
         }
+
+        @Override
+        public Iterator<float[]> copy() {
+            return new FloatVectorIterator(vectorValueBytesRef, new float[buffer.length], size);
+        }
+
+        @Override
+        public void reset() {
+            idx = 0;
+            vectorValues.rewind();
+        }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java
index 85c851dbe545c..7d948cf5a74fa 100644
--- a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java
@@ -17,6 +17,12 @@ default void checkDimensions(int qvDims) {
         checkDimensions(getDims(), qvDims);
     }

+    float maxSimDotProduct(float[][] query);
+
+    float maxSimDotProduct(byte[][] query);
+
+    float maxSimInvHamming(byte[][] query);
+
     Iterator<float[]> getVectors();

     float[] getMagnitudes();
@@ -63,6 +69,21 @@ public int getDims() {
             throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
         }

+        @Override
+        public float maxSimDotProduct(float[][] query) {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
+        @Override
+        public float maxSimDotProduct(byte[][] query) {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
+        @Override
+        public float maxSimInvHamming(byte[][] query) {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
         @Override
         public int size() {
             return 0;
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java
new file mode 100644
index 0000000000000..b8615ac877254
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import java.util.Iterator;
+
+public interface VectorIterator<E> extends Iterator<E> {
+    Iterator<E> copy();
+
+    void reset();
+
+    static VectorIterator<float[]> from(float[][] vectors) {
+        return new VectorIterator<>() {
+            private int i = 0;
+
+            @Override
+            public boolean hasNext() {
+                return i < vectors.length;
+            }
+
+            @Override
+            public float[] next() {
+                return vectors[i++];
+            }
+
+            @Override
+            public Iterator<float[]> copy() {
+                return from(vectors);
+            }
+
+            @Override
+            public void reset() {
+                i = 0;
+            }
+        };
+    }
+
+    static VectorIterator<byte[]> from(byte[][] vectors) {
+        return new VectorIterator<>() {
+            private int i = 0;
+
+            @Override
+            public boolean hasNext() {
+                return i < vectors.length;
+            }
+
+            @Override
+            public byte[] next() {
+                return vectors[i++];
+            }
+
+            @Override
+            public Iterator<byte[]> copy() {
+                return from(vectors);
+            }
+
+            @Override
+            public void reset() {
+                i = 0;
+            }
+        };
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java
index b4d5512331b42..d08a76e51c6bd 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.search.aggregations;

 import org.apache.lucene.search.ScoreMode;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.search.profile.aggregation.InternalAggregationProfileTree;

@@ -98,10 +99,10 @@ public final void postCollection() throws IOException {
     }

     @Override
-    public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         InternalAggregation[] delegateResults = delegate.buildAggregations(owningBucketOrds);
-        InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
+        InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())];
+        for (int ordIdx = 0; ordIdx < result.length; ordIdx++) {
             result[ordIdx] = adapt(delegateResults[ordIdx]);
         }
         return result;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java
index 0d36469dddfdc..aa8d9fba554c1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java
@@ -13,6 +13,8 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.search.aggregations.support.AggregationPath;
 import org.elasticsearch.search.sort.SortOrder;
@@ -142,7 +144,7 @@ public interface BucketComparator {
      * @return the results for each ordinal, in the same order as the array
      *         of ordinals
      */
-    public abstract InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException;
+    public abstract InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException;

     /**
      * Release this aggregation and its sub-aggregations.
@@ -153,11 +155,11 @@ public interface BucketComparator {
      * Build the result of this aggregation if it is at the "top level"
      * of the aggregation tree. If, instead, it is a sub-aggregation of
      * another aggregation then the aggregation that contains it will call
-     * {@link #buildAggregations(long[])}.
+     * {@link #buildAggregations(LongArray)}.
      */
     public final InternalAggregation buildTopLevel() throws IOException {
         assert parent() == null;
-        return buildAggregations(new long[] { 0 })[0];
+        return buildAggregations(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true))[0];
     }

     /**
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
index bf9116207b375..11444edca080d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
@@ -13,6 +13,8 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreMode;
+import org.elasticsearch.common.CheckedIntFunction;
+import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.Maps;
@@ -40,7 +42,7 @@ public abstract class AggregatorBase extends Aggregator {

     protected final String name;
     protected final Aggregator parent;
-    private final AggregationContext context;
+    protected final AggregationContext context;
     private final Map<String, Object> metadata;

     protected final Aggregator[] subAggregators;
@@ -48,6 +50,8 @@ public abstract class AggregatorBase extends Aggregator {
     private Map<String, Aggregator> subAggregatorbyName;
     private long requestBytesUsed;
+    private final CircuitBreaker breaker;
+    private int callCount;

     /**
      * Constructs a new Aggregator.
@@ -72,6 +76,7 @@ protected AggregatorBase(
         this.metadata = metadata;
         this.parent = parent;
         this.context = context;
+        this.breaker = context.breaker();
         assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggregatorFactories.EMPTY instead";
         this.subAggregators = factories.createSubAggregators(this, subAggregatorCardinality);
         context.addReleasable(this);
@@ -327,6 +332,30 @@ protected final InternalAggregations buildEmptySubAggregations() {
         return InternalAggregations.from(aggs);
     }

+    /**
+     * Builds the aggregations array with the provided size and populates it using the provided function.
+     */
+    protected final InternalAggregation[] buildAggregations(int size, CheckedIntFunction<InternalAggregation, IOException> aggFunction)
+        throws IOException {
+        final InternalAggregation[] results = new InternalAggregation[size];
+        for (int i = 0; i < results.length; i++) {
+            checkRealMemoryCB("internal_aggregation");
+            results[i] = aggFunction.apply(i);
+        }
+        return results;
+    }
+
+    /**
+     * This method calls the circuit breaker from time to time in order to give it a chance to check available
+     * memory in the parent breaker (which should be a real memory breaker) and break the execution if we are running out.
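+     * (Editorial note: a zero-byte estimate is enough here because the parent real-memory breaker
+     * re-measures actual heap usage on every probe it receives; nothing new needs to be reserved.)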
+     * To achieve that, we are passing 0 as the estimated bytes every 1024 calls.
+     */
+    protected final void checkRealMemoryCB(String label) {
+        if ((++callCount & 0x3FF) == 0) {
+            breaker.addEstimateBytesAndMaybeBreak(0, label);
+        }
+    }
+
     @Override
     public String toString() {
         return name;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java
index 8accc6b15d820..a32211fd4d8fb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.search.aggregations;

+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.support.AggregationContext;

 import java.io.IOException;
@@ -39,11 +40,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag
     }

     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            results[ordIdx] = buildEmptyAggregation();
-        }
-        return results;
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+        return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildEmptyAggregation());
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
index 231130c920349..44d76d31be0e7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java
@@ -20,6 +20,7 @@
 import org.apache.lucene.util.packed.PackedInts;
 import org.apache.lucene.util.packed.PackedLongValues;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -146,7 +147,7 @@ public void postCollection() throws IOException {
      * Replay the wrapped collector, but only on a selection of buckets.
      */
     @Override
-    public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
+    public void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException {
         if (finished == false) {
             throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called");
         }
@@ -154,9 +155,9 @@ public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
             throw new IllegalStateException("Already been replayed");
         }

-        this.selectedBuckets = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE);
-        for (long ord : selectedBuckets) {
-            this.selectedBuckets.add(ord);
+        this.selectedBuckets = new LongHash(selectedBuckets.size(), BigArrays.NON_RECYCLING_INSTANCE);
+        for (long i = 0; i < selectedBuckets.size(); i++) {
+            this.selectedBuckets.add(selectedBuckets.get(i));
         }

         boolean needsScores = scoreMode().needsScores();
@@ -232,21 +233,22 @@ private static void failInCaseOfBadScorer(String message) {
      * been collected directly.
      */
     @Override
-    public Aggregator wrap(final Aggregator in) {
+    public Aggregator wrap(final Aggregator in, BigArrays bigArrays) {
         return new WrappedAggregator(in) {
             @Override
-            public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+            public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
                 if (selectedBuckets == null) {
                     throw new IllegalStateException("Collection has not been replayed yet.");
                 }
-                long[] rebasedOrds = new long[owningBucketOrds.length];
-                for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                    rebasedOrds[ordIdx] = selectedBuckets.find(owningBucketOrds[ordIdx]);
-                    if (rebasedOrds[ordIdx] == -1) {
-                        throw new IllegalStateException("Cannot build for a bucket which has not been collected");
+                try (LongArray rebasedOrds = bigArrays.newLongArray(owningBucketOrds.size())) {
+                    for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) {
+                        rebasedOrds.set(ordIdx, selectedBuckets.find(owningBucketOrds.get(ordIdx)));
+                        if (rebasedOrds.get(ordIdx) == -1) {
+                            throw new IllegalStateException("Cannot build for a bucket which has not been collected");
+                        }
                     }
+                    return in.buildAggregations(rebasedOrds);
                 }
-                return in.buildAggregations(rebasedOrds);
             }
         };
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
index e6c26c4278807..ea667b821a7dd 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
@@ -9,8 +9,9 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.apache.lucene.index.LeafReaderContext;
-import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.util.IntArray;
 import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.search.aggregations.AggregationErrors;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -40,10 +41,9 @@
 import java.util.function.ToLongFunction;

 public abstract class BucketsAggregator extends AggregatorBase {
-    private final CircuitBreaker breaker;
+
     private LongArray docCounts;
     protected final DocCountProvider docCountProvider;
-    private int callCount;

     @SuppressWarnings("this-escape")
     public BucketsAggregator(
@@ -55,7 +55,6 @@ public BucketsAggregator(
         Map<String, Object> metadata
     ) throws IOException {
         super(name, factories, aggCtx, parent, bucketCardinality, metadata);
-        breaker = aggCtx.breaker();
         docCounts = bigArrays().newLongArray(1, true);
         docCountProvider = new DocCountProvider();
     }
@@ -81,7 +80,7 @@ public final void collectBucket(LeafBucketCollector subCollector, int doc, long
         grow(bucketOrd + 1);
         int docCount = docCountProvider.getDocCount(doc);
         if (docCounts.increment(bucketOrd, docCount) == docCount) {
-            updateCircuitBreaker("allocated_buckets");
+            checkRealMemoryCB("allocated_buckets");
         }
         subCollector.collect(doc, bucketOrd);
     }
@@ -155,26 +154,26 @@ public final long bucketDocCount(long bucketOrd) {
     /**
      * Hook to allow taking an action before building the sub agg results.
      */
-    protected void prepareSubAggs(long[] ordsToCollect) throws IOException {}
+    protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {}

     /**
      * Build the results of the sub-aggregations of the buckets at each of
      * the provided ordinals.
      * <p>
      * Most aggregations should probably use something like
-     * {@link #buildSubAggsForAllBuckets(Object[][], ToLongFunction, BiConsumer)}
-     * or {@link #buildAggregationsForVariableBuckets(long[], LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)}
-     * or {@link #buildAggregationsForFixedBucketCount(long[], int, BucketBuilderForFixedCount, Function)}
-     * or {@link #buildAggregationsForSingleBucket(long[], SingleBucketResultBuilder)}
+     * {@link #buildSubAggsForAllBuckets(ObjectArray, ToLongFunction, BiConsumer)}
+     * or {@link #buildAggregationsForVariableBuckets(LongArray, LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)}
+     * or {@link #buildAggregationsForFixedBucketCount(LongArray, int, BucketBuilderForFixedCount, Function)}
+     * or {@link #buildAggregationsForSingleBucket(LongArray, SingleBucketResultBuilder)}
      * instead of calling this directly.
      * @return the sub-aggregation results in the same order as the provided
      *         array of ordinals
      */
-    protected final IntFunction<InternalAggregations> buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException {
+    protected final IntFunction<InternalAggregations> buildSubAggsForBuckets(LongArray bucketOrdsToCollect) throws IOException {
         prepareSubAggs(bucketOrdsToCollect);
         InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][];
         for (int i = 0; i < subAggregators.length; i++) {
-            updateCircuitBreaker("building_sub_aggregation");
+            checkRealMemoryCB("building_sub_aggregation");
             aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect);
         }
         return subAggsForBucketFunction(aggregations);
@@ -204,26 +203,28 @@ public int size() {
      * @param setAggs how to set the sub-aggregation results on a bucket
      */
     protected final <B> void buildSubAggsForAllBuckets(
-        B[][] buckets,
+        ObjectArray<B[]> buckets,
         ToLongFunction<B> bucketToOrd,
         BiConsumer<B, InternalAggregations> setAggs
     ) throws IOException {
-        int totalBucketOrdsToCollect = 0;
-        for (B[] bucketsForOneResult : buckets) {
-            totalBucketOrdsToCollect += bucketsForOneResult.length;
+        long totalBucketOrdsToCollect = 0;
+        for (long b = 0; b < buckets.size(); b++) {
+            totalBucketOrdsToCollect += buckets.get(b).length;
         }
-        long[] bucketOrdsToCollect = new long[totalBucketOrdsToCollect];
-        int s = 0;
-        for (B[] bucketsForOneResult : buckets) {
-            for (B bucket : bucketsForOneResult) {
-                bucketOrdsToCollect[s++] = bucketToOrd.applyAsLong(bucket);
+
+        try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalBucketOrdsToCollect)) {
+            int s = 0;
+            for (long ord = 0; ord < buckets.size(); ord++) {
+                for (B bucket : buckets.get(ord)) {
+                    bucketOrdsToCollect.set(s++, bucketToOrd.applyAsLong(bucket));
+                }
             }
-        }
-        var results = buildSubAggsForBuckets(bucketOrdsToCollect);
-        s = 0;
-        for (B[] bucket : buckets) {
-            for (int b = 0; b < bucket.length; b++) {
-                setAggs.accept(bucket[b], results.apply(s++));
+            var results = buildSubAggsForBuckets(bucketOrdsToCollect);
+            s = 0;
+            for (long ord = 0; ord < buckets.size(); ord++) {
+                for (B value : buckets.get(ord)) {
+                    setAggs.accept(value, results.apply(s++));
+                }
             }
         }
     }
@@ -237,37 +238,37 @@ protected final <B> void buildSubAggsForAllBuckets(
      * @param resultBuilder how to build a result from buckets
      */
     protected final <B> InternalAggregation[] buildAggregationsForFixedBucketCount(
-        long[] owningBucketOrds,
+        LongArray owningBucketOrds,
         int bucketsPerOwningBucketOrd,
         BucketBuilderForFixedCount<B> bucketBuilder,
         Function<List<B>, InternalAggregation> resultBuilder
     ) throws IOException {
-        int totalBuckets = owningBucketOrds.length * bucketsPerOwningBucketOrd;
-        long[] bucketOrdsToCollect = new long[totalBuckets];
-        int bucketOrdIdx = 0;
-        for (long owningBucketOrd : owningBucketOrds) {
-            long ord = owningBucketOrd * bucketsPerOwningBucketOrd;
-            for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
-                bucketOrdsToCollect[bucketOrdIdx++] = ord++;
-            }
-        }
-        bucketOrdIdx = 0;
-        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) {
-            List<B> buckets = new ArrayList<>(bucketsPerOwningBucketOrd);
-            for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
-                buckets.add(
-                    bucketBuilder.build(
-                        offsetInOwningOrd,
-                        bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]),
-                        subAggregationResults.apply(bucketOrdIdx++)
-                    )
-                );
+        try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(owningBucketOrds.size() * bucketsPerOwningBucketOrd)) {
+            final int[] bucketOrdIdx = new int[] { 0 };
+            for (long i = 0; i < owningBucketOrds.size(); i++) {
+                long ord = owningBucketOrds.get(i) * bucketsPerOwningBucketOrd;
+                for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
+                    bucketOrdsToCollect.set(bucketOrdIdx[0]++, ord++);
+                }
             }
-            results[owningOrdIdx] = resultBuilder.apply(buckets);
+            bucketOrdIdx[0] = 0;
+            var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
+
+            return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> {
+                List<B> buckets = new ArrayList<>(bucketsPerOwningBucketOrd);
+                for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
+                    checkRealMemoryCBForInternalBucket();
+                    buckets.add(
+                        bucketBuilder.build(
+                            offsetInOwningOrd,
+                            bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx[0])),
+                            subAggregationResults.apply(bucketOrdIdx[0]++)
+                        )
+                    );
+                }
+                return resultBuilder.apply(buckets);
+            });
         }
-        return results;
     }

     @FunctionalInterface
@@ -280,19 +281,20 @@ protected interface BucketBuilderForFixedCount<B> {
      * @param owningBucketOrds owning bucket ordinals for which to build the results
      * @param resultBuilder how to build a result from the sub aggregation results
      */
-    protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, SingleBucketResultBuilder resultBuilder)
-        throws IOException {
+    protected final InternalAggregation[] buildAggregationsForSingleBucket(
+        LongArray owningBucketOrds,
+        SingleBucketResultBuilder resultBuilder
+    ) throws IOException {
        /*
         * It'd be entirely reasonable to call
         * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)`
         * here but we don't because single bucket aggs never have.
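         * (Editorial gloss: that is, they never have more than one bucket per owning ordinal to account for.)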
         */
        var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds);
-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults.apply(ordIdx));
-        }
-        return results;
+        return buildAggregations(
+            Math.toIntExact(owningBucketOrds.size()),
+            ordIdx -> resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx))
+        );
     }

     @FunctionalInterface
@@ -307,54 +309,59 @@ protected interface SingleBucketResultBuilder {
      * @param bucketOrds hash of values to the bucket ordinal
      */
     protected final <B> InternalAggregation[] buildAggregationsForVariableBuckets(
-        long[] owningBucketOrds,
+        LongArray owningBucketOrds,
         LongKeyedBucketOrds bucketOrds,
         BucketBuilderForVariable<B> bucketBuilder,
         ResultBuilderForVariable<B> resultBuilder
     ) throws IOException {
         long totalOrdsToCollect = 0;
-        final int[] bucketsInOrd = new int[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]);
-            bucketsInOrd[ordIdx] = (int) bucketCount;
-            totalOrdsToCollect += bucketCount;
-        }
-        if (totalOrdsToCollect > Integer.MAX_VALUE) {
-            // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is not
-            // much the user can do about that. If this occurs with any frequency, we should do something about it.
-            throw new IllegalArgumentException(
-                "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]"
-            );
-        }
-        long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect];
-        int b = 0;
-        for (long owningBucketOrd : owningBucketOrds) {
-            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd);
-            while (ordsEnum.next()) {
-                bucketOrdsToCollect[b++] = ordsEnum.ord();
+        try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) {
+            for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) {
+                final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx));
+                bucketsInOrd.set(ordIdx, (int) bucketCount);
+                totalOrdsToCollect += bucketCount;
             }
-        }
+            if (totalOrdsToCollect > Integer.MAX_VALUE) {
+                // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is
+                // not
+                // much the user can do about that. If this occurs with any frequency, we should do something about it.
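+                // (Editorial note: the combined count must still fit in an int because an int cursor indexes
+                // the flat bucketOrdsToCollect array below; e.g. three owning ordinals with a billion buckets
+                // each would overflow it even though each owning ordinal is individually fine.)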
+                throw new IllegalArgumentException(
+                    "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]"
+                );
+            }
+            try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) {
+                final int[] b = new int[] { 0 };
+                for (long i = 0; i < owningBucketOrds.size(); i++) {
+                    LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i));
+                    while (ordsEnum.next()) {
+                        bucketOrdsToCollect.set(b[0]++, ordsEnum.ord());
+                    }
                 }
-            buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++)));
+                var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
+
+                b[0] = 0;
+                return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> {
+                    final long owningBucketOrd = owningBucketOrds.get(ordIdx);
+                    List<B> buckets = new ArrayList<>(bucketsInOrd.get(ordIdx));
+                    LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd);
+                    while (ordsEnum.next()) {
+                        if (bucketOrdsToCollect.get(b[0]) != ordsEnum.ord()) {
+                            // If we hit this, something has gone horribly wrong and we need to investigate
+                            throw AggregationErrors.iterationOrderChangedWithoutMutating(
+                                bucketOrds.toString(),
+                                ordsEnum.ord(),
+                                bucketOrdsToCollect.get(b[0])
+                            );
+                        }
+                        checkRealMemoryCBForInternalBucket();
+                        buckets.add(
+                            bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b[0]++))
+                        );
+                    }
+                    return resultBuilder.build(owningBucketOrd, buckets);
+                });
             }
-            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], buckets);
         }
-        return results;
     }

     @FunctionalInterface
@@ -412,14 +419,9 @@ protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException
         docCountProvider.setLeafReaderContext(ctx);
     }

-    /**
-     * This method calls the circuit breaker from time to time in order to give it a chance to check available
-     * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out.
-     * To achieve that, we are passing 0 as the estimated bytes every 1024 calls
-     */
-    private void updateCircuitBreaker(String label) {
-        if ((++callCount & 0x3FF) == 0) {
-            breaker.addEstimateBytesAndMaybeBreak(0, label);
-        }
+    /** This method should be called whenever a new bucket object is created. It will check the real memory
+     * circuit breaker in a sampling fashion. See {@link #checkRealMemoryCB(String)} */
+    protected final void checkRealMemoryCBForInternalBucket() {
+        checkRealMemoryCB("internal_bucket");
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java
index 84a15b6d1c0eb..64744b705e222 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.search.aggregations.bucket;

+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.BucketCollector;
@@ -65,7 +66,7 @@ protected void doPreCollection() throws IOException {
             }
             deferredAggregations.add(subAggregators[i]);
             deferredAggregationNames.add(subAggregators[i].name());
-            subAggregators[i] = deferringCollector.wrap(subAggregators[i]);
+            subAggregators[i] = deferringCollector.wrap(subAggregators[i], bigArrays());
         } else {
             collectors.add(subAggregators[i]);
         }
@@ -87,7 +88,7 @@ protected DeferringBucketCollector deferringCollector() {
     /**
      * Build the {@link DeferringBucketCollector}. The default implementation
      * replays all hits against the buckets selected by
-     * {#link {@link DeferringBucketCollector#prepareSelectedBuckets(long...)}.
+     * {@link DeferringBucketCollector#prepareSelectedBuckets(LongArray)}.
      */
     protected DeferringBucketCollector buildDeferringCollector() {
         return new BestBucketsDeferringCollector(topLevelQuery(), searcher(), descendsFromGlobalAggregator(parent()));
@@ -107,7 +108,7 @@ protected boolean shouldDefer(Aggregator aggregator) {
     }

     @Override
-    protected final void prepareSubAggs(long[] bucketOrdsToCollect) throws IOException {
+    protected final void prepareSubAggs(LongArray bucketOrdsToCollect) throws IOException {
         if (deferringCollector != null) {
             deferringCollector.prepareSelectedBuckets(bucketOrdsToCollect);
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java
index 44cff2651e273..468fec29a9420 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java
@@ -10,6 +10,8 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.apache.lucene.search.ScoreMode;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.BucketCollector;
@@ -37,13 +39,13 @@ public DeferringBucketCollector() {}

     /**
      * Replay the deferred hits on the selected buckets.
      */
-    public abstract void prepareSelectedBuckets(long... selectedBuckets) throws IOException;
+    public abstract void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException;

     /**
      * Wrap the provided aggregator so that it behaves (almost) as if it had
      * been collected directly.
      */
-    public Aggregator wrap(final Aggregator in) {
+    public Aggregator wrap(final Aggregator in, BigArrays bigArrays) {
         return new WrappedAggregator(in);
     }

@@ -80,7 +82,7 @@ public Aggregator subAggregator(String name) {
     }

     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return in.buildAggregations(owningBucketOrds);
     }

diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
index 87ebec525a6fa..d39e90b44579e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.HasAggregations;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.xcontent.ToXContent;

 import java.util.List;

@@ -24,7 +23,7 @@ public interface MultiBucketsAggregation extends Aggregation {
      * A bucket represents a criterion to which all documents that fall in it adhere. It is also uniquely identified
      * by a key, and can potentially hold sub-aggregations computed over all documents in it.
      */
-    interface Bucket extends HasAggregations, ToXContent {
+    interface Bucket extends HasAggregations {
         /**
          * @return The key associated with the bucket
          */
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
index 9ee15306ce636..0baecf6e3f92b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
@@ -35,6 +35,7 @@
 import org.apache.lucene.util.RoaringDocIdSet;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Rounding;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.IndexSortConfig;
@@ -184,50 +185,51 @@ protected void doPostCollection() throws IOException {
     }

     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         // Composite aggregator must be at the top of the aggregation tree
-        assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0L;
+        assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0L;
         if (deferredCollectors != NO_OP_BUCKET_COLLECTOR) {
             // Replay all documents that contain at least one top bucket (collected during the first pass).
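             // (Editorial note: this is the deferred-collection handshake. The best-buckets collector buffered
             // matching doc IDs during the first pass; prepareSelectedBuckets(...) replays them only for the
             // surviving top buckets so their sub-aggregations can be built below.)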
runDeferredCollections(); } - int num = Math.min(size, (int) queue.size()); + final int num = Math.min(size, (int) queue.size()); final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num]; - long[] bucketOrdsToCollect = new long[(int) queue.size()]; - for (int i = 0; i < queue.size(); i++) { - bucketOrdsToCollect[i] = i; - } - var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect); - while (queue.size() > 0) { - int slot = queue.pop(); - CompositeKey key = queue.toCompositeKey(slot); - InternalAggregations aggs = subAggsForBuckets.apply(slot); - long docCount = queue.getDocCount(slot); - buckets[(int) queue.size()] = new InternalComposite.InternalBucket( - sourceNames, - formats, - key, - reverseMuls, - missingOrders, - docCount, - aggs - ); + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(queue.size())) { + for (int i = 0; i < queue.size(); i++) { + bucketOrdsToCollect.set(i, i); + } + var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect); + while (queue.size() > 0) { + int slot = queue.pop(); + CompositeKey key = queue.toCompositeKey(slot); + InternalAggregations aggs = subAggsForBuckets.apply(slot); + long docCount = queue.getDocCount(slot); + buckets[(int) queue.size()] = new InternalComposite.InternalBucket( + sourceNames, + formats, + key, + reverseMuls, + missingOrders, + docCount, + aggs + ); + } + CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null; + return new InternalAggregation[] { + new InternalComposite( + name, + size, + sourceNames, + formats, + Arrays.asList(buckets), + lastBucket, + reverseMuls, + missingOrders, + earlyTerminated, + metadata() + ) }; } - CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null; - return new InternalAggregation[] { - new InternalComposite( - name, - size, - sourceNames, - formats, - Arrays.asList(buckets), - lastBucket, - reverseMuls, - missingOrders, - earlyTerminated, - metadata() - ) }; } @Override @@ -244,6 +246,7 @@ public InternalAggregation buildEmptyAggregation() { false, metadata() ); + } private void finishLeaf() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 30c45ba46d9b7..8b3253418bc23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -465,14 +465,6 @@ public int compareKey(InternalBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - /** - * See {@link CompositeAggregation#bucketToXContent} - */ - throw new UnsupportedOperationException("not implemented"); - } - InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( sourceNames, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index af4d60bf424a7..344b90b06c4f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -13,6 +13,8 @@ import 
org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -108,70 +110,80 @@ private void collectOrdinal(long bucketOrdinal, int doc, LeafBucketCollector sub } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - StringTerms.Bucket[][] topBucketsPerOrd = new StringTerms.Bucket[owningBucketOrds.length][]; - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); - - // as users can't control sort order, in practice we'll always sort by doc count descending - try ( - BucketPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - StringTerms.Bucket spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier emptyBucketBuilder = () -> new StringTerms.Bucket(new BytesRef(), 0, null, false, 0, format); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (spare == null) { - spare = emptyBucketBuilder.get(); + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size()); + ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); + + // as users can't control sort order, in practice we'll always sort by doc count descending + try ( + BucketPriorityQueue ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + StringTerms.Bucket spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + Supplier emptyBucketBuilder = () -> new StringTerms.Bucket( + new BytesRef(), + 0, + null, + false, + 0, + format + ); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + ordsEnum.readValue(spare.getTermBytes()); + spare.setDocCount(docCount); + spare.setBucketOrd(ordsEnum.ord()); + spare = ordered.insertWithOverflow(spare); } - ordsEnum.readValue(spare.getTermBytes()); - spare.setDocCount(docCount); - spare.setBucketOrd(ordsEnum.ord()); - spare = ordered.insertWithOverflow(spare); - } - topBucketsPerOrd[ordIdx] = new StringTerms.Bucket[(int) ordered.size()]; - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount(); - topBucketsPerOrd[ordIdx][i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd[ordIdx][i].getTermBytes())); + topBucketsPerOrd.set(ordIdx, new StringTerms.Bucket[(int) ordered.size()]); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = 
ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); + topBucketsPerOrd.get(ordIdx)[i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd.get(ordIdx)[i].getTermBytes())); + } } } - } - buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final BucketOrder reduceOrder; - if (isKeyOrder(order) == false) { - reduceOrder = InternalOrder.key(true); - Arrays.sort(topBucketsPerOrd[ordIdx], reduceOrder.comparator()); - } else { - reduceOrder = order; - } - result[ordIdx] = new StringTerms( - name, - reduceOrder, - order, - bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), - metadata(), - format, - bucketCountThresholds.getShardSize(), - false, - otherDocCounts[ordIdx], - Arrays.asList(topBucketsPerOrd[ordIdx]), - null - ); + buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); + + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + final BucketOrder reduceOrder; + if (isKeyOrder(order) == false) { + reduceOrder = InternalOrder.key(true); + Arrays.sort(topBucketsPerOrd.get(ordIdx), reduceOrder.comparator()); + } else { + reduceOrder = order; + } + return new StringTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + false, + otherDocCounts.get(ordIdx), + Arrays.asList(topBucketsPerOrd.get(ordIdx)), + null + ); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index fede97c7fddee..a9ec0ba878ec0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -208,21 +209,15 @@ List filters() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, filters.size() + (otherBucketKey == null ? 
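In the CountedTermsAggregator hunk above, the plain `StringTerms.Bucket[][] topBucketsPerOrd` and `long[] otherDocCounts` become big-array handles opened in one try-with-resources, and the per-ordinal `StringTerms` results are produced through a `buildAggregations(size, ordIdx -> ...)` callback instead of a second loop filling an array. A sketch of that shape, with simplified stand-ins for the real `ObjectArray`/`LongArray`:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.IntFunction;

// Stand-ins for the circuit-breaker-accounted handles in org.elasticsearch.common.util.
class CountedTermsShape {
    interface Releasable extends AutoCloseable { @Override void close(); }
    interface LongArray extends Releasable { long size(); long get(long i); void increment(long i, long delta); }
    interface ObjectArray<T> extends Releasable { long size(); T get(long i); void set(long i, T v); }

    // Mirrors the buildAggregations(int, IntFunction) helper the diff switches to:
    // one result per owning bucket ordinal, built lazily through the callback.
    static <R> List<R> buildAggregations(int count, IntFunction<R> resultForOrd) {
        List<R> results = new ArrayList<>(count);
        for (int ordIdx = 0; ordIdx < count; ordIdx++) {
            results.add(resultForOrd.apply(ordIdx));
        }
        return results;
    }

    static <R> List<R> example(ObjectArray<String[]> topBucketsPerOrd, LongArray otherDocCounts,
                               IntFunction<R> makeResult) {
        // try-with-resources mirrors the diff: both big arrays are released
        // even if result building throws
        try (topBucketsPerOrd; otherDocCounts) {
            return buildAggregations(Math.toIntExact(topBucketsPerOrd.size()), makeResult);
        }
    }
}
```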
0 : 1), (offsetInOwningOrd, docCount, subAggregationResults) -> { if (offsetInOwningOrd < filters.size()) { - return new InternalFilters.InternalBucket( - filters.get(offsetInOwningOrd).key(), - docCount, - subAggregationResults, - keyed, - keyedBucket - ); + return new InternalFilters.InternalBucket(filters.get(offsetInOwningOrd).key(), docCount, subAggregationResults); } - return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults, keyed, keyedBucket); + return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults); }, buckets -> new InternalFilters(name, buckets, keyed, keyedBucket, metadata()) ); @@ -233,12 +228,12 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(filters.size() + (otherBucketKey == null ? 0 : 1)); for (QueryToFilterAdapter filter : filters) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs); buckets.add(bucket); } if (otherBucketKey != null) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs); buckets.add(bucket); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index a5dfb0d8efafa..c05759582346a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -32,26 +32,20 @@ public class InternalFilters extends InternalMultiBucketAggregation implements Filters { public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Filters.Bucket { - private final boolean keyed; - private final boolean keyedBucket; private final String key; private long docCount; InternalAggregations aggregations; - public InternalBucket(String key, long docCount, InternalAggregations aggregations, boolean keyed, boolean keyedBucket) { + public InternalBucket(String key, long docCount, InternalAggregations aggregations) { this.key = key; - this.keyedBucket = keyedBucket; this.docCount = docCount; this.aggregations = aggregations; - this.keyed = keyed; } /** * Read from a stream. 
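The InternalFilters change below is the template repeated across the rest of this PR: `keyed` and `keyedBucket` are properties of the whole aggregation, so each bucket stops storing them (and stops implementing `toXContent`), while the owning aggregation threads the flags into a private `bucketToXContent` as it iterates in `doXContentBody`. A stripped-down illustration, with `StringBuilder` standing in for `XContentBuilder` and JSON separators elided:

```java
import java.util.List;

class FiltersRendering {
    record Bucket(String key, long docCount) {
        // before: Bucket stored keyed/keyedBucket and overrode toXContent()
        // after: the parent passes the rendering flags in at serialization time
        void bucketToXContent(StringBuilder out, boolean keyed, boolean keyedBucket) {
            if (keyed && keyedBucket) {
                out.append('"').append(key).append("\": ");
            }
            out.append("{ \"doc_count\": ").append(docCount).append(" }");
        }
    }

    // The aggregation-level flags are threaded in exactly once, here.
    static String render(List<Bucket> buckets, boolean keyed, boolean keyedBucket) {
        StringBuilder out = new StringBuilder(keyed && keyedBucket ? "{ " : "[ ");
        for (Bucket b : buckets) {
            b.bucketToXContent(out, keyed, keyedBucket);
        }
        return out.append(keyed && keyedBucket ? " }" : " ]").toString();
    }
}
```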
*/ - public InternalBucket(StreamInput in, boolean keyed, boolean keyedBucket) throws IOException { - this.keyed = keyed; - this.keyedBucket = keyedBucket; + public InternalBucket(StreamInput in) throws IOException { key = in.readOptionalString(); docCount = in.readVLong(); aggregations = InternalAggregations.readFrom(in); @@ -84,8 +78,7 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed, boolean keyedBucket) throws IOException { if (keyed && keyedBucket) { builder.startObject(key); } else { @@ -97,7 +90,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -110,24 +102,20 @@ public boolean equals(Object other) { } InternalBucket that = (InternalBucket) other; return Objects.equals(key, that.key) - && Objects.equals(keyed, that.keyed) - && Objects.equals(keyedBucket, that.keyedBucket) && Objects.equals(docCount, that.docCount) && Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { - return Objects.hash(getClass(), key, keyed, keyedBucket, docCount, aggregations); + return Objects.hash(getClass(), key, docCount, aggregations); } InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( key, samplingContext.scaleUp(docCount), - InternalAggregations.finalizeSampling(aggregations, samplingContext), - keyed, - keyedBucket + InternalAggregations.finalizeSampling(aggregations, samplingContext) ); } } @@ -155,7 +143,7 @@ public InternalFilters(StreamInput in) throws IOException { int size = in.readVInt(); List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalBucket(in, keyed, keyedBucket)); + buckets.add(new InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -182,7 +170,7 @@ public InternalFilters create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed, keyedBucket); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } @Override @@ -211,7 +199,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont ) { @Override protected InternalBucket createBucket(InternalBucket proto, long docCount, InternalAggregations aggregations) { - return new InternalBucket(proto.key, docCount, aggregations, proto.keyed, proto.keyedBucket); + return new InternalBucket(proto.key, docCount, aggregations); } }; @@ -252,7 +240,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed, keyedBucket); } if (keyed && keyedBucket) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index cde26bb2214ed..1d3614af08768 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -132,39 +134,40 @@ public void collect(int doc, long owningBucketOrd) throws IOException { protected abstract InternalGeoGridBucket newEmptyBucket(); @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalGeoGridBucket[][] topBucketsPerOrd = new InternalGeoGridBucket[owningBucketOrds.length][]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]), shardSize); - - try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, bigArrays())) { - InternalGeoGridBucket spare = null; - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - if (spare == null) { - spare = newEmptyBucket(); - } + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize); + + try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, bigArrays())) { + InternalGeoGridBucket spare = null; + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = newEmptyBucket(); + } - // need a special function to keep the source bucket - // up-to-date so it can get the appropriate key - spare.hashAsLong = ordsEnum.value(); - spare.docCount = bucketDocCount(ordsEnum.ord()); - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } + // need a special function to keep the source bucket + // up-to-date so it can get the appropriate key + spare.hashAsLong = ordsEnum.value(); + spare.docCount = bucketDocCount(ordsEnum.ord()); + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } - topBucketsPerOrd[ordIdx] = new InternalGeoGridBucket[(int) ordered.size()]; - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); + topBucketsPerOrd.set(ordIdx, new InternalGeoGridBucket[(int) ordered.size()]); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); + } } } + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()) + ); } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] results = new 
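GeoGridAggregator (like CountedTermsAggregator above) reuses a single mutable "spare" bucket while selecting the top `size` buckets per owning ordinal, and the diff adds a `checkRealMemoryCBForInternalBucket()` call before each fresh allocation so the real-memory circuit breaker sees the new object. A self-contained sketch of the spare-reuse pattern, using `java.util.PriorityQueue` in place of the BigArrays-backed `BucketPriorityQueue`:

```java
import java.util.PriorityQueue;

class TopN {
    static final class Candidate { long hash; long docCount; }

    static Candidate[] topByDocCount(long[][] candidates /* {hash, docCount} */, int size) {
        // min-heap on docCount: the smallest of the kept buckets is evicted first
        PriorityQueue<Candidate> ordered = new PriorityQueue<>(Math.max(1, size),
            (a, b) -> Long.compare(a.docCount, b.docCount));
        Candidate spare = null;
        for (long[] c : candidates) {
            if (spare == null) {
                // real code calls checkRealMemoryCBForInternalBucket() here,
                // before allocating a fresh bucket
                spare = new Candidate();
            }
            spare.hash = c[0];
            spare.docCount = c[1];
            if (ordered.size() < size) {
                ordered.add(spare);
                spare = null;            // kept by the queue: need a fresh spare
            } else if (ordered.peek().docCount < spare.docCount) {
                Candidate evicted = ordered.poll();
                ordered.add(spare);
                spare = evicted;         // evicted object becomes the new spare
            }
        }
        Candidate[] top = new Candidate[ordered.size()];
        for (int i = top.length - 1; i >= 0; --i) {
            top[i] = ordered.poll();     // ascending poll order -> fill from the back
        }
        return top;
    }
}
```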
InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd[ordIdx]), metadata()); - } - return results; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index d56625ab28c51..6a32b41034503 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -152,7 +152,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalGeoGridBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index c972845468c2b..9e3c96da2e70b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -76,14 +77,12 @@ public int compareTo(InternalGeoGridBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString()); builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index b5d3485e72f82..b83001c34377e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.Weight; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -62,8 +63,8 @@ public void setScorer(Scorable scorer) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] 
owningBucketOrds) throws IOException { - assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0 : "global aggregator can only be a top level aggregator"; + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0 : "global aggregator can only be a top level aggregator"; return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalGlobal( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index b81d8b002b6b2..5ea8cd035e580 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -79,11 +80,11 @@ public AbstractHistogramAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { double roundKey = Double.longBitsToDouble(bucketValue); double key = roundKey * interval + offset; - return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); + return new InternalHistogram.Bucket(key, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 86c320d8dc319..1eb0226ad8c8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -17,6 +17,7 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Rounding.DateTimeUnit; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; @@ -337,9 +338,9 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { - return new InternalDateHistogram.Bucket(bucketValue, docCount, 
keyed, formatter, subAggregationResults); + return new InternalDateHistogram.Bucket(bucketValue, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); @@ -465,7 +466,6 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { new InternalDateHistogram.Bucket( rangeBucket.getFrom().toInstant().toEpochMilli(), rangeBucket.getDocCount(), - keyed, format, rangeBucket.getAggregations() ) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java index 2bfd85e5fe03a..5a104055d9aec 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; @@ -163,14 +164,13 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets( owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> new InternalDateHistogram.Bucket( bucketValue, docCount, - keyed, formatter, subAggregationResults ), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 564abff2a9f97..d2badbeec4622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -53,19 +53,17 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final long key; - private final transient boolean keyed; - public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. 
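`InternalDateHistogram.Bucket`'s `keyed` field was declared `transient` and never written by `writeTo`, so removing it reshapes the constructor and `readFrom` without changing the wire format, which is why no transport-version check accompanies the change. A minimal sketch, with `DataInput` standing in for `StreamInput`:

```java
import java.io.DataInput;
import java.io.IOException;

class DateHistogramBucketIO {
    record Bucket(long key, long docCount) {}

    // before: readFrom(StreamInput in, boolean keyed, DocValueFormat format)
    // after:  readFrom(StreamInput in, DocValueFormat format)
    // the bytes read are identical either way
    static Bucket readFrom(DataInput in) throws IOException {
        long key = in.readLong();       // in.readLong() in the real code
        long docCount = in.readLong();  // in.readVLong() in the real code
        return new Bucket(key, docCount);
    }
}
```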
*/ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readLong(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readLong(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -101,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -116,7 +113,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -124,15 +120,10 @@ public int compareKey(Bucket other) { return Long.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -237,7 +228,7 @@ public InternalDateHistogram(StreamInput in) throws IOException { } else { downsampledResultsOffset = false; } - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -301,7 +292,7 @@ public InternalDateHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -398,7 +389,7 @@ public void accept(long key) { reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new InternalDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -546,7 +537,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -603,7 +594,7 @@ public InternalAggregation createAggregation(List { final double key; - private final transient boolean keyed; - public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(double key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. 
*/ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readDouble(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readDouble(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -96,8 +94,7 @@ public Object getKey() { return key; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -111,7 +108,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -119,15 +115,10 @@ public int compareKey(Bucket other) { return Double.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -220,7 +211,7 @@ public InternalHistogram(StreamInput in) throws IOException { } format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -265,7 +256,7 @@ public InternalHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -373,7 +364,7 @@ public void accept(double key) { reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -478,7 +469,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -508,7 +499,7 @@ public InternalAggregation createAggregation(List buckets = new ArrayList<>(numClusters); - for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) { - buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd))); - } + List buckets = new ArrayList<>(numClusters); + for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) { + buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd))); + } - Function, InternalAggregation> resultBuilder = 
bucketsToFormat -> { - // The contract of the histogram aggregation is that shards must return - // buckets ordered by centroid in ascending order - CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator()); + Function, InternalAggregation> resultBuilder = bucketsToFormat -> { + // The contract of the histogram aggregation is that shards must return + // buckets ordered by centroid in ascending order + CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator()); - InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo( - buildEmptySubAggregations() - ); + InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo( + buildEmptySubAggregations() + ); - return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata()); - }; + return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata()); + }; - return new InternalAggregation[] { resultBuilder.apply(buckets) }; + return new InternalAggregation[] { resultBuilder.apply(buckets) }; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java index 5c8f8ab9c562e..b49668e45b889 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.search.aggregations.bucket.missing; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.DocValueBits; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -67,7 +68,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalMissing( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 0fbb9745aa400..23a2d6380c290 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -124,7 +125,7 @@ private void processBufferedDocs() throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return 
buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalNested( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 0e3e4679c7a2d..2477b67367e14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -86,7 +87,7 @@ public void collect(int childDoc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalReverseNested( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index f7022ff22d373..5b456b3246b64 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -38,9 +38,7 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket IpPrefix.Bucket, KeyComparable { - private final transient DocValueFormat format; private final BytesRef key; - private final boolean keyed; private final boolean isIpv6; private final int prefixLength; private final boolean appendPrefixLength; @@ -48,18 +46,14 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final InternalAggregations aggregations; public Bucket( - DocValueFormat format, BytesRef key, - boolean keyed, boolean isIpv6, int prefixLength, boolean appendPrefixLength, long docCount, InternalAggregations aggregations ) { - this.format = format; this.key = key; - this.keyed = keyed; this.isIpv6 = isIpv6; this.prefixLength = prefixLength; this.appendPrefixLength = appendPrefixLength; @@ -70,9 +64,7 @@ public Bucket( /** * Read from a stream. 
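`InternalIpPrefix.Bucket` below similarly drops its per-bucket `format` and `keyed` fields, and the diff removes them from `equals` and `hashCode` at the same time: state removed from a value class has to leave its equality contract too, or buckets differing only in a deleted field would have compared unequal. A stripped-down illustration:

```java
import java.util.Arrays;
import java.util.Objects;

final class PrefixBucket {
    final byte[] key;
    final int prefixLength;
    final long docCount;

    PrefixBucket(byte[] key, int prefixLength, long docCount) {
        this.key = key;
        this.prefixLength = prefixLength;
        this.docCount = docCount;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        PrefixBucket b = (PrefixBucket) o;
        // compares exactly the fields the class stores - nothing more
        return prefixLength == b.prefixLength
            && docCount == b.docCount
            && Arrays.equals(key, b.key);
    }

    @Override
    public int hashCode() {
        return Objects.hash(Arrays.hashCode(key), prefixLength, docCount);
    }
}
```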
*/ - public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { - this.format = format; - this.keyed = keyed; + public Bucket(StreamInput in) throws IOException { this.key = in.readBytesRef(); this.isIpv6 = in.readBoolean(); this.prefixLength = in.readVInt(); @@ -81,8 +73,7 @@ public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOExc this.aggregations = InternalAggregations.readFrom(in); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String key = DocValueFormat.IP.format(this.key); if (appendPrefixLength) { key = key + "/" + prefixLength; @@ -101,7 +92,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(IpPrefixAggregationBuilder.PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } private static BytesRef netmask(int prefixLength) { @@ -118,10 +108,6 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - public DocValueFormat getFormat() { - return format; - } - public BytesRef getKey() { return key; } @@ -162,14 +148,13 @@ public boolean equals(Object o) { && prefixLength == bucket.prefixLength && appendPrefixLength == bucket.appendPrefixLength && docCount == bucket.docCount - && Objects.equals(format, bucket.format) && Objects.equals(key, bucket.key) && Objects.equals(aggregations, bucket.aggregations); } @Override public int hashCode() { - return Objects.hash(format, key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); + return Objects.hash(key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); } @Override @@ -206,7 +191,7 @@ public InternalIpPrefix(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); minDocCount = in.readVLong(); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, format, keyed)); + buckets = in.readCollectionAsList(Bucket::new); } @Override @@ -298,7 +283,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalIpPrefix.Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -316,9 +301,7 @@ public InternalIpPrefix create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { return new Bucket( - format, prototype.key, - prototype.keyed, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, @@ -328,16 +311,7 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) } private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) { - return new Bucket( - format, - prototype.key, - prototype.keyed, - prototype.isIpv6, - prototype.prefixLength, - prototype.appendPrefixLength, - docCount, - aggregations - ); + return new Bucket(prototype.key, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, docCount, aggregations); } private Bucket reduceBucket(List buckets, AggregationReduceContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java index 9548cd871e161..38d26bfa9ae28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -160,57 +162,60 @@ private static void maskIpAddress(final BytesRef ipAddress, final BytesRef subne } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { long totalOrdsToCollect = 0; - final int[] bucketsInOrd = new int[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - bucketsInOrd[ordIdx] = (int) bucketCount; - totalOrdsToCollect += bucketCount; - } - - long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect]; - int b = 0; - for (long owningBucketOrd : owningBucketOrds) { - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - while (ordsEnum.next()) { - bucketOrdsToCollect[b++] = ordsEnum.ord(); + try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)); + bucketsInOrd.set(ordIdx, (int) bucketCount); + totalOrdsToCollect += bucketCount; } - } - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - b = 0; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - List buckets = new ArrayList<>(bucketsInOrd[ordIdx]); - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long ordinal = ordsEnum.ord(); - if (bucketOrdsToCollect[b] != ordinal) { - throw AggregationErrors.iterationOrderChangedWithoutMutating(bucketOrds.toString(), ordinal, bucketOrdsToCollect[b]); + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { + int[] b = new int[] { 0 }; + for (long i = 0; i < owningBucketOrds.size(); i++) { + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); + while (ordsEnum.next()) { + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); + } } - BytesRef ipAddress = new BytesRef(); - ordsEnum.readValue(ipAddress); - long docCount = bucketDocCount(ordinal); - buckets.add( - new InternalIpPrefix.Bucket( - config.format(), - BytesRef.deepCopyOf(ipAddress), - keyed, - ipPrefix.isIpv6, - ipPrefix.prefixLength, - ipPrefix.appendPrefixLength, - docCount, - subAggregationResults.apply(b++) - ) - ); - - // NOTE: the aggregator is expected to return sorted results - CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); + + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + b[0] = 0; + 
return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + long ordinal = ordsEnum.ord(); + if (bucketOrdsToCollect.get(b[0]) != ordinal) { + throw AggregationErrors.iterationOrderChangedWithoutMutating( + bucketOrds.toString(), + ordinal, + bucketOrdsToCollect.get(b[0]) + ); + } + BytesRef ipAddress = new BytesRef(); + ordsEnum.readValue(ipAddress); + long docCount = bucketDocCount(ordinal); + checkRealMemoryCBForInternalBucket(); + buckets.add( + new InternalIpPrefix.Bucket( + BytesRef.deepCopyOf(ipAddress), + ipPrefix.isIpv6, + ipPrefix.prefixLength, + ipPrefix.appendPrefixLength, + docCount, + subAggregationResults.apply(b[0]++) + ) + ); + + // NOTE: the aggregator is expected to return sorted results + CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); + } + return new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); + }); } - results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); } - return results; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 6119af3cb6a57..c10bb3543549e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; @@ -359,13 +360,13 @@ private interface DocCollector { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, docCount, subAggregationResults); + return new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, docCount, subAggregationResults); }, buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata()) ); @@ -377,7 +378,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (Range range : ranges) { - InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, 0, subAggs); + InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, 0, subAggs); buckets.add(bucket); } return new InternalBinaryRange(name, format, keyed, buckets, metadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java 
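A small Java detail in the IpPrefixAggregator rewrite above: the running cursor `b` into `bucketOrdsToCollect` becomes `int[] b = new int[] { 0 }` because it is now mutated inside the `ordIdx -> ...` result callback, and lambdas can only capture effectively final locals. The standard one-element-array idiom, in isolation:

```java
import java.util.function.IntUnaryOperator;

class LambdaCursor {
    static int[] prefixSums(int[] values) {
        final int[] running = new int[] { 0 };   // mutable box, reference stays final
        IntUnaryOperator next = i -> {
            running[0] += values[i];             // mutate through the captured array
            return running[0];
        };
        int[] out = new int[values.length];
        for (int i = 0; i < values.length; i++) {
            out[i] = next.applyAsInt(i);
        }
        return out;
    }
}
```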
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 100bab7443a51..9571dfebc6069 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -39,23 +39,13 @@ public final class InternalBinaryRange extends InternalMultiBucketAggregation Bucket.createFromStream(stream, format, keyed)); + buckets = in.readCollectionAsList(stream -> Bucket.createFromStream(stream, format)); } @Override @@ -235,7 +222,7 @@ public InternalBinaryRange create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(format, keyed, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations); + return new Bucket(format, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations); } @Override @@ -251,7 +238,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return new Bucket(proto.format, proto.keyed, proto.key, proto.from, proto.to, docCount, aggregations); + return new Bucket(proto.format, proto.key, proto.from, proto.to, docCount, aggregations); } }; @@ -299,7 +286,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket range : buckets) { - range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7b2858806c325..7291a099dd7f7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -34,19 +34,11 @@ public Bucket( boolean keyed, DocValueFormat formatter ) { - super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter); + super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } - public Bucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat formatter - ) { - super(key, from, to, docCount, aggregations, keyed, formatter); + public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat formatter) { + super(key, from, to, docCount, aggregations, formatter); } @Override @@ -99,10 +91,9 @@ public Bucket createBucket( double to, long docCount, InternalAggregations aggregations, - boolean keyed, DocValueFormat formatter ) { - return new Bucket(key, from, to, docCount, aggregations, keyed, formatter); + return new Bucket(key, from, to, docCount, aggregations, formatter); } @Override @@ -113,7 +104,6 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) prototype.internalGetTo(), prototype.getDocCount(), aggregations, - prototype.getKeyed(), prototype.getFormat() ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java index d1c3761d45e82..9a33df4702c1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java @@ -23,8 +23,8 @@ public class InternalGeoDistance extends InternalRange ranges, DocValueFormat format, boolean keye } @SuppressWarnings("unchecked") - public B createBucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat format - ) { - return (B) new Bucket(key, from, to, docCount, aggregations, keyed, format); + public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat format) { + return (B) new Bucket(key, from, to, docCount, aggregations, format); } @SuppressWarnings("unchecked") @@ -232,7 +209,6 @@ public B createBucket(InternalAggregations aggregations, B prototype) { prototype.to, prototype.getDocCount(), aggregations, - prototype.keyed, prototype.format ); } @@ -285,7 +261,7 @@ public InternalRange(StreamInput in) throws IOException { } long docCount = in.readVLong(); InternalAggregations aggregations = InternalAggregations.readFrom(in); - ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, keyed, format)); + ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, format)); } this.ranges = ranges; } @@ -335,7 +311,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.keyed, proto.format); + return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.format); } }; @@ -371,7 +347,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { b.to, samplingContext.scaleUp(b.getDocCount()), InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext), - b.keyed, b.format ) ) @@ -390,7 +365,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (B range : ranges) { - range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 6d63bb786c29f..a4574e8081868 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; @@ -531,21 +532,13 @@ protected long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) { @Override @SuppressWarnings("unchecked") - public 
InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return rangeFactory.createBucket( - range.key, - range.originalFrom, - range.originalTo, - docCount, - subAggregationResults, - keyed, - format - ); + return rangeFactory.createBucket(range.key, range.originalFrom, range.originalTo, docCount, subAggregationResults, format); }, buckets -> rangeFactory.create(name, buckets, format, keyed, metadata()) ); @@ -563,7 +556,6 @@ public InternalAggregation buildEmptyAggregation() { range.originalTo, 0, subAggs, - keyed, format ); buckets.add(bucket); @@ -613,7 +605,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (RangeAggregator.Range range : ranges) { - buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, keyed, format)); + buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, format)); } return factory.create(name, buckets, format, keyed, metadata()); } @@ -885,7 +877,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { Range r = ranges[i]; InternalFilters.InternalBucket b = filters.getBuckets().get(i); buckets.add( - rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), keyed, format) + rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), format) ); } return rangeFactory.create(name(), buckets, format, keyed, filters.getMetadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 37cee75c11b48..70f72fafba7b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -19,6 +19,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -120,7 +121,7 @@ public void postCollection() throws IOException { } @Override - public void prepareSelectedBuckets(long... 
selectedBuckets) throws IOException { + public void prepareSelectedBuckets(LongArray selectedBuckets) { // no-op - deferred aggs processed in postCollection call } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 78b2cdfe7655d..a4c06a194fbf7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -11,6 +11,7 @@ import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -212,7 +213,7 @@ protected boolean shouldDefer(Aggregator aggregator) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalSampler( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java index fc03786356f87..699b8c6b5d500 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java @@ -9,18 +9,23 @@ package org.elasticsearch.search.aggregations.bucket.sampler.random; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; -import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -33,14 +38,13 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single private final int seed; private final Integer shardSeed; private final double probability; - private final CheckedSupplier weightSupplier; + private Weight weight; RandomSamplerAggregator( String name, int seed, Integer 
shardSeed, double probability, - CheckedSupplier weightSupplier, AggregatorFactories factories, AggregationContext context, Aggregator parent, @@ -55,12 +59,35 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single RandomSamplerAggregationBuilder.NAME + " aggregation [" + name + "] must have sub aggregations configured" ); } - this.weightSupplier = weightSupplier; this.shardSeed = shardSeed; } + /** + * This creates the query weight which will be used in the aggregator. + * + * This weight is a boolean query between {@link RandomSamplingQuery} and the configured top level query of the search. This allows + * the aggregation to iterate the documents directly, thus sampling in the background instead of the foreground. + * @return weight to be used, is cached for additional usages + * @throws IOException when building the weight or queries fails; + */ + private Weight getWeight() throws IOException { + if (weight == null) { + ScoreMode scoreMode = scoreMode(); + BooleanQuery.Builder fullQuery = new BooleanQuery.Builder().add( + context.query(), + scoreMode.needsScores() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER + ); + if (probability < 1.0) { + Query sampleQuery = new RandomSamplingQuery(probability, seed, shardSeed == null ? context.shardRandomSeed() : shardSeed); + fullQuery.add(sampleQuery, BooleanClause.Occur.FILTER); + } + weight = context.searcher().createWeight(context.searcher().rewrite(fullQuery.build()), scoreMode, 1f); + } + return weight; + } + @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalRandomSampler( @@ -100,22 +127,26 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt if (sub.isNoop()) { return LeafBucketCollector.NO_OP_COLLECTOR; } + + Scorer scorer = getWeight().scorer(aggCtx.getLeafReaderContext()); + // This means there are no docs to iterate, possibly due to the fields not existing + if (scorer == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + sub.setScorer(scorer); + // No sampling is being done, collect all docs + // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353 if (probability >= 1.0) { grow(1); - return new LeafBucketCollector() { + return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { collectExistingBucket(sub, doc, 0); } }; } - // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353 - Scorer scorer = weightSupplier.get().scorer(aggCtx.getLeafReaderContext()); - // This means there are no docs to iterate, possibly due to the fields not existing - if (scorer == null) { - return LeafBucketCollector.NO_OP_COLLECTOR; - } + final DocIdSetIterator docIt = scorer.iterator(); final Bits liveDocs = aggCtx.getLeafReaderContext().reader().getLiveDocs(); try { @@ -135,5 +166,4 @@ public void collect(int doc, long owningBucketOrd) throws IOException { // Since we have done our own collection, there is nothing for the leaf collector to do return LeafBucketCollector.NO_OP_COLLECTOR; } - } diff --git 
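
Aside: the getWeight() method introduced above combines two small patterns worth calling out. The Weight is built lazily and cached in a plain field, which is safe because an aggregator instance is confined to a single shard search; and the query composition keeps the top-level query as MUST only when scores are actually needed, while the sampling query is always a FILTER clause so it can never affect scoring. A minimal sketch of that composition using plain Lucene types (the method name and parameters are illustrative, not part of the patch):

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;

    class SampledQuerySketch {
        // Mirrors the BooleanQuery built by getWeight(): top-level query plus optional sampling filter.
        static Query sampledQuery(Query topLevel, Query samplingQuery, boolean needsScores, double probability) {
            BooleanQuery.Builder builder = new BooleanQuery.Builder()
                // scores propagate only when a sub-aggregation actually needs them
                .add(topLevel, needsScores ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER);
            if (probability < 1.0) {
                // sampling restricts the doc set but must never contribute to scoring
                builder.add(samplingQuery, BooleanClause.Occur.FILTER);
            }
            return builder.build();
        }
    }
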
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java index 67c958046dac7..50921501896d3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java @@ -9,10 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.sampler.random; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -30,7 +26,6 @@ public class RandomSamplerAggregatorFactory extends AggregatorFactory { private final Integer shardSeed; private final double probability; private final SamplingContext samplingContext; - private Weight weight; RandomSamplerAggregatorFactory( String name, @@ -57,41 +52,6 @@ public Optional getSamplingContext() { @Override public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { - return new RandomSamplerAggregator( - name, - seed, - shardSeed, - probability, - this::getWeight, - factories, - context, - parent, - cardinality, - metadata - ); + return new RandomSamplerAggregator(name, seed, shardSeed, probability, factories, context, parent, cardinality, metadata); } - - /** - * This creates the query weight which will be used in the aggregator. - * - * This weight is a boolean query between {@link RandomSamplingQuery} and the configured top level query of the search. This allows - * the aggregation to iterate the documents directly, thus sampling in the background instead of the foreground. - * @return weight to be used, is cached for additional usages - * @throws IOException when building the weight or queries fails; - */ - private Weight getWeight() throws IOException { - if (weight == null) { - RandomSamplingQuery query = new RandomSamplingQuery( - probability, - seed, - shardSeed == null ? 
context.shardRandomSeed() : shardSeed - ); - BooleanQuery booleanQuery = new BooleanQuery.Builder().add(query, BooleanClause.Occur.FILTER) - .add(context.query(), BooleanClause.Occur.FILTER) - .build(); - weight = context.searcher().createWeight(context.searcher().rewrite(booleanQuery), ScoreMode.COMPLETE_NO_SCORES, 1f); - } - return weight; - } - } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 5c422a9dd4e32..6388eb3baaa84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -66,6 +66,8 @@ public abstract static class AbstractTermsBucket> buckets @@ -369,7 +372,7 @@ protected static XContentBuilder doXContentCommon( builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (AbstractTermsBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, showDocCountError); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 0f7c61dc9f25b..db9da6ed67207 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -191,7 +192,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -696,61 +697,66 @@ abstract class ResultStrategy< B extends InternalMultiBucketAggregation.InternalBucket, TB extends InternalMultiBucketAggregation.InternalBucket> implements Releasable { - private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + if (valueCount == 0) { // no context in this reader - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildNoValuesResult(owningBucketOrds[ordIdx]); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildNoValuesResult(owningBucketOrds.get(ordIdx)) + ); } - - B[][] topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.length); - long[] otherDocCount = new long[owningBucketOrds.length]; - GlobalOrdLookupFunction 
lookupGlobalOrd = valuesSupplier.get()::lookupOrd; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final int size; - if (bucketCountThresholds.getMinDocCount() == 0) { - // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns - size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize()); - } else { - size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize()); - } - try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { - final int finalOrdIdx = ordIdx; - BucketUpdater updater = bucketUpdater(owningBucketOrds[ordIdx], lookupGlobalOrd); - collectionStrategy.forEach(owningBucketOrds[ordIdx], new BucketInfoConsumer() { - TB spare = null; - - @Override - public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException { - otherDocCount[finalOrdIdx] += docCount; - if (docCount >= bucketCountThresholds.getShardMinDocCount()) { - if (spare == null) { - spare = buildEmptyTemporaryBucket(); + try ( + LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.size()) + ) { + GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd; + for (long ordIdx = 0; ordIdx < topBucketsPreOrd.size(); ordIdx++) { + final int size; + if (bucketCountThresholds.getMinDocCount() == 0) { + // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns + size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize()); + } else { + size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize()); + } + try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { + final long finalOrdIdx = ordIdx; + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + BucketUpdater updater = bucketUpdater(owningBucketOrd, lookupGlobalOrd); + collectionStrategy.forEach(owningBucketOrd, new BucketInfoConsumer() { + TB spare = null; + + @Override + public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException { + otherDocCount.increment(finalOrdIdx, docCount); + if (docCount >= bucketCountThresholds.getShardMinDocCount()) { + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = buildEmptyTemporaryBucket(); + } + updater.updateBucket(spare, globalOrd, bucketOrd, docCount); + spare = ordered.insertWithOverflow(spare); } - updater.updateBucket(spare, globalOrd, bucketOrd, docCount); - spare = ordered.insertWithOverflow(spare); } + }); + + // Get the top buckets + topBucketsPreOrd.set(ordIdx, buildBuckets((int) ordered.size())); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + checkRealMemoryCBForInternalBucket(); + B bucket = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); + topBucketsPreOrd.get(ordIdx)[i] = bucket; + otherDocCount.increment(ordIdx, -bucket.getDocCount()); } - }); - - // Get the top buckets - topBucketsPreOrd[ordIdx] = buildBuckets((int) ordered.size()); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPreOrd[ordIdx][i] = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); - otherDocCount[ordIdx] -= topBucketsPreOrd[ordIdx][i].getDocCount(); } } - } - buildSubAggs(topBucketsPreOrd); + buildSubAggs(topBucketsPreOrd); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCount[ordIdx], 
topBucketsPreOrd[ordIdx]); + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)) + ); } - return results; } /** @@ -785,7 +791,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal to collect the @@ -802,7 +808,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. */ - abstract void buildSubAggs(B[][] topBucketsPreOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException; /** * Turn the buckets into an aggregation result. @@ -841,8 +847,8 @@ LeafBucketCollector wrapCollector(LeafBucketCollector primary) { } @Override - StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new StringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -879,7 +885,7 @@ StringTerms.Bucket convertTempBucketToRealBucket(OrdBucket temp, GlobalOrdLookup } @Override - void buildSubAggs(StringTerms.Bucket[][] topBucketsPreOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -973,8 +979,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantStringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -1026,7 +1032,7 @@ SignificantStringTerms.Bucket convertTempBucketToRealBucket( } @Override - void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPreOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java index f179b7d05f9a4..3f75a27306ab4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java @@ -134,7 +134,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th // There is a condition (presumably when only one shard has a bucket?) 
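
Aside: the hunks above replace plain long[] and B[][] with LongArray and ObjectArray from BigArrays. The trade is that BigArrays allocations are long-indexed and accounted against the circuit breaker, but they must be explicitly released, hence the try-with-resources blocks threaded through the rewrite. A self-contained sketch of the idiom (NON_RECYCLING_INSTANCE is used here only to keep the example standalone):

    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.common.util.LongArray;
    import org.elasticsearch.common.util.ObjectArray;

    class BigArraysSketch {
        static long totalDocCount(long owningBucketCount) {
            BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
            try (
                LongArray otherDocCount = bigArrays.newLongArray(owningBucketCount, true); // true = zero-filled
                ObjectArray<String[]> topBuckets = bigArrays.newObjectArray(owningBucketCount)
            ) {
                for (long ord = 0; ord < otherDocCount.size(); ord++) {
                    otherDocCount.increment(ord, 10);   // replaces otherDocCount[ordIdx] += 10
                    topBuckets.set(ord, new String[0]); // replaces topBucketsPreOrd[ordIdx] = ...
                }
                long total = 0;
                for (long ord = 0; ord < otherDocCount.size(); ord++) {
                    total += otherDocCount.get(ord);
                }
                return total;
            } // both arrays are released here and their breaker accounting is returned
        }
    }
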
where reduce is not called // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer if (bucket.subsetDf >= minDocCount) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index 563321f56cb5f..5b9403840dfff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -145,6 +145,6 @@ public int hashCode() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index 6540cd2ee38da..64cebee880141 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; @@ -160,7 +158,7 @@ protected static XContentBuilder doXContentCommon(XContentBuilder builder, Param throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index a60911b466847..3f579947248bb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -62,7 +62,7 @@ public interface Reader> { long supersetSize; /** * Ordinal of the bucket while it is being built. Not used after it is - * returned from {@link Aggregator#buildAggregations(long[])} and not + * returned from {@link Aggregator#buildAggregations(org.elasticsearch.common.util.LongArray)} and not * serialized. 
*/ transient long bucketOrd; @@ -157,8 +157,7 @@ public int hashCode() { return Objects.hash(getClass(), aggregations, score, format); } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -166,7 +165,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) builder.field(BG_COUNT, supersetDf); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 8e25c164d5f33..b94b1f5ea40b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -136,7 +136,7 @@ public void setAggregations(InternalAggregations aggregations) { } @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -145,7 +145,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) } aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java index 651705bd71ef8..45ea1245ec38d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java @@ -12,7 +12,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; @@ -118,70 +120,67 @@ private void collectValue(long val, int docId, long owningBucketOrd, LeafBucketC } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Collect the list of buckets, populate the filter with terms * that are too frequent, and figure out how to merge sub-buckets. 
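
Aside: the last several files apply one refactor repeatedly. Buckets stop overriding toXContent, and rendering state that is really per-response (keyed for ranges, showDocCountError for terms) is passed into a bucketToXContent method by the enclosing aggregation instead of being stored redundantly on every bucket. A hedged sketch with illustrative names and field keys, not the Elasticsearch classes:

    import org.elasticsearch.xcontent.XContentBuilder;

    import java.io.IOException;

    // Illustrative stand-in for a terms bucket after the refactor.
    record TermsBucketSketch(String key, long docCount, long docCountError) {
        void bucketToXContent(XContentBuilder builder, boolean showDocCountError) throws IOException {
            builder.startObject();
            builder.field("key", key);
            builder.field("doc_count", docCount);
            if (showDocCountError) { // supplied by the parent aggregation, not kept per bucket
                builder.field("doc_count_error_upper_bound", docCountError);
            }
            builder.endObject();
        }
    }

This is also why doXContentCommon grows a showDocCountError parameter below and UnmappedTerms ends up passing false explicitly.
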
*/ - LongRareTerms.Bucket[][] rarestPerOrd = new LongRareTerms.Bucket[owningBucketOrds.length][]; - SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length]; - long keepCount = 0; - long[] mergeMap = new long[(int) bucketOrds.size()]; - Arrays.fill(mergeMap, -1); - long offset = 0; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) { - filters[owningOrdIdx] = newFilter(); - List builtBuckets = new ArrayList<>(); - LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]); - while (collectedBuckets.next()) { - long docCount = bucketDocCount(collectedBuckets.ord()); - // if the key is below threshold, reinsert into the new ords - if (docCount <= maxDocCount) { - LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); - bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); - mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd; - builtBuckets.add(bucket); - keepCount++; - } else { - filters[owningOrdIdx].add(collectedBuckets.value()); + try ( + ObjectArray rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()); + ObjectArray filters = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) { + mergeMap.fill(0, mergeMap.size(), -1); + long keepCount = 0; + long offset = 0; + for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) { + try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) { + filters.set(owningOrdIdx, newFilter()); + List builtBuckets = new ArrayList<>(); + LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx)); + while (collectedBuckets.next()) { + long docCount = bucketDocCount(collectedBuckets.ord()); + // if the key is below threshold, reinsert into the new ords + if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); + LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); + bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); + mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); + builtBuckets.add(bucket); + keepCount++; + } else { + filters.get(owningOrdIdx).add(collectedBuckets.value()); + } + } + rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(LongRareTerms.Bucket[]::new)); + offset += bucketsInThisOwningBucketToCollect.size(); } } - rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(LongRareTerms.Bucket[]::new); - offset += bucketsInThisOwningBucketToCollect.size(); - } - } - /* - * Only merge/delete the ordinals if we have actually deleted one, - * to save on some redundant work. - */ - if (keepCount != mergeMap.length) { - LongUnaryOperator howToMerge = b -> mergeMap[(int) b]; - rewriteBuckets(offset, howToMerge); - if (deferringCollector() != null) { - ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + /* + * Only merge/delete the ordinals if we have actually deleted one, + * to save on some redundant work. 
+ */ + if (keepCount != mergeMap.size()) { + LongUnaryOperator howToMerge = mergeMap::get; + rewriteBuckets(offset, howToMerge); + if (deferringCollector() != null) { + ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + } + } } - } - /* - * Now build the results! - */ - buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator()); - result[ordIdx] = new LongRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(rarestPerOrd[ordIdx]), - maxDocCount, - filters[ordIdx] - ); + /* + * Now build the results! + */ + buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + + return LongRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + LongRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); + Arrays.sort(buckets, ORDER.comparator()); + return new LongRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index 76202b6386a73..6ae47d5975479 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -18,6 +18,7 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -117,7 +118,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -282,45 +283,50 @@ abstract class ResultStrategy ordered = buildPriorityQueue(size)) { - B spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = emptyBucketBuilder.get(); + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(Math.toIntExact(owningBucketOrds.size())) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + long owningOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningOrd, excludeDeletedDocs); + int size = (int) Math.min(bucketOrds.size(), 
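
Aside: the rewrite above moves the rare-terms bookkeeping onto BigArrays, but the merge-map logic itself is unchanged: every collected ordinal either gets a compact new ordinal (still rare) or maps to -1 and is folded into the cuckoo filter (too frequent). A plain-JDK sketch of that partitioning:

    import java.util.function.LongUnaryOperator;

    class MergeMapSketch {
        // Ordinals whose doc counts stay within maxDocCount get compact new ordinals;
        // the rest map to -1 and are dropped (the patch adds their keys to the cuckoo filter).
        static LongUnaryOperator buildMergeMap(long[] docCounts, long maxDocCount) {
            long[] mergeMap = new long[docCounts.length];
            long next = 0;
            for (int ord = 0; ord < docCounts.length; ord++) {
                mergeMap[ord] = docCounts[ord] <= maxDocCount ? next++ : -1;
            }
            return ord -> mergeMap[(int) ord]; // same shape as the mergeMap::get method reference above
        }
    }
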
bucketCountThresholds.getShardSize()); + + try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { + B spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd); + Supplier emptyBucketBuilder = emptyBucketBuilder(owningOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + updateBucket(spare, ordsEnum, docCount); + spare = ordered.insertWithOverflow(spare); } - updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } - topBucketsPerOrd[ordIdx] = buildBuckets((int) ordered.size()); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount(); - finalizeBucket(topBucketsPerOrd[ordIdx][i]); + topBucketsPerOrd.set(ordIdx, buildBuckets((int) ordered.size())); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); + finalizeBucket(topBucketsPerOrd.get(ordIdx)[i]); + } } } - } - buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + buildSubAggs(topBucketsPerOrd); + + return MapStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } /** @@ -361,7 +367,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal to collect the @@ -379,7 +385,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. */ - abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; /** * Turn the buckets into an aggregation result. 
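
Aside: the loop above is the standard shard-side top-N selection: a bounded priority queue plus a single reusable "spare" bucket. insertWithOverflow returns either the rejected candidate or the evicted smallest element, which becomes the spare for the next candidate, so at most size + 1 bucket objects are live at any time. A sketch using Lucene's PriorityQueue as a stand-in for the ObjectArrayPriorityQueue used in the patch:

    import org.apache.lucene.util.PriorityQueue;

    class TopNSketch {
        static long[] topN(long[] docCounts, int n) {
            PriorityQueue<long[]> ordered = new PriorityQueue<>(n) {
                @Override
                protected boolean lessThan(long[] a, long[] b) {
                    return a[0] < b[0]; // smallest doc count sits on top and is evicted first
                }
            };
            long[] spare = null;
            for (long docCount : docCounts) {
                if (spare == null) {
                    spare = new long[1]; // allocate only when the previous spare was kept by the queue
                }
                spare[0] = docCount;
                spare = ordered.insertWithOverflow(spare);
            }
            long[] top = new long[ordered.size()];
            for (int i = ordered.size() - 1; i >= 0; --i) {
                top[i] = ordered.pop()[0]; // pop() yields ascending order, so fill from the back
            }
            return top;
        }
    }
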
@@ -501,8 +507,8 @@ void updateBucket(StringTerms.Bucket spare, BytesKeyedBucketOrds.BucketOrdsEnum } @Override - StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new StringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -521,7 +527,7 @@ void finalizeBucket(StringTerms.Bucket bucket) { } @Override - void buildSubAggs(StringTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); } @@ -637,8 +643,8 @@ void updateBucket(SignificantStringTerms.Bucket spare, BytesKeyedBucketOrds.Buck } @Override - SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantStringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -657,7 +663,7 @@ void finalizeBucket(SignificantStringTerms.Bucket bucket) { } @Override - void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index d39348d80df14..ce89b95b76a05 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -39,7 +40,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Function; @@ -136,7 +136,7 @@ private void collectValue(long val, int doc, long owningBucketOrd, LeafBucketCol } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -163,48 +163,52 @@ public void collectDebugInfo(BiConsumer add) { abstract class ResultStrategy implements Releasable { - private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - B[][] topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.length); - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - collectZeroDocEntriesIfNeeded(owningBucketOrds[ordIdx], excludeDeletedDocs); - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { - B spare = null; - BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier 
emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = emptyBucketBuilder.get(); + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { + B spare = null; + BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + Supplier emptyBucketBuilder = emptyBucketBuilder(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + updateBucket(spare, ordsEnum, docCount); + spare = ordered.insertWithOverflow(spare); } - updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - B[] bucketsForOrd = buildBuckets((int) ordered.size()); - topBucketsPerOrd[ordIdx] = bucketsForOrd; - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd[ordIdx][b] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + // Get the top buckets + B[] bucketsForOrd = buildBuckets((int) ordered.size()); + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + for (int b = (int) ordered.size() - 1; b >= 0; --b) { + topBucketsPerOrd.get(ordIdx)[b] = ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[b].getDocCount()); + } } } - } - buildSubAggs(topBucketsPerOrd); + buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + return NumericTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } /** @@ -227,7 +231,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal. These arrays @@ -258,7 +262,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. 
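
Aside: beyond the BigArrays migration, the behavioral addition in these hunks is the checkRealMemoryCBForInternalBucket() call inserted just before each spare or real bucket is allocated: building very large bucket sets now probes the real-memory circuit breaker so the request trips cleanly instead of pushing the node toward OOM. The shape of the pattern, with a toy breaker standing in for the Elasticsearch one:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Supplier;

    class BreakerCheckSketch {
        interface Breaker {
            void check(); // throws an unchecked "circuit broken" exception when over budget
        }

        static <B> List<B> buildBuckets(int count, Breaker breaker, Supplier<B> newBucket) {
            List<B> buckets = new ArrayList<>(count);
            for (int i = 0; i < count; i++) {
                breaker.check(); // fail fast before allocating yet another bucket
                buckets.add(newBucket.get());
            }
            return buckets;
        }
    }
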
*/ - abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; /** * Collect extra entries for "zero" hit documents if they were requested @@ -297,7 +301,7 @@ final ObjectArrayPriorityQueue buildPriorityQueue(int size) { } @Override - final void buildSubAggs(B[][] topBucketsPerOrd) throws IOException { + final void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -356,8 +360,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException { } @Override - LongTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new LongTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -397,7 +401,7 @@ LongTerms buildResult(long owningBucketOrd, long otherDocCount, LongTerms.Bucket bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), null ); } @@ -438,8 +442,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException { } @Override - DoubleTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new DoubleTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -479,7 +483,7 @@ DoubleTerms buildResult(long owningBucketOrd, long otherDocCount, DoubleTerms.Bu bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), null ); } @@ -551,8 +555,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - SignificantLongTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantLongTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -583,7 +587,7 @@ ObjectArrayPriorityQueue buildPriorityQueue(int siz } @Override - void buildSubAggs(SignificantLongTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -601,7 +605,7 @@ SignificantLongTerms buildResult(long owningBucketOrd, long otherDocCoun, Signif subsetSizes.get(owningBucketOrd), supersetSize, significanceHeuristic, - List.of(topBuckets) + Arrays.asList(topBuckets) ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java index 2bc2833f0ddce..8a2c9d52f4212 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java @@ -12,6 +12,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; @@ -119,72 +121,74 @@ private void collectValue(BytesRef val, int doc, long 
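
Aside: the quiet List.of(topBuckets) to Arrays.asList(topBuckets) switches above also change behavior: List.of copies the array and rejects null elements, while Arrays.asList is a fixed-size view over the array it is given, so a freshly built, possibly large bucket array avoids a second allocation. The patch does not state which property motivated the change, but the difference is easy to demonstrate:

    import java.util.Arrays;
    import java.util.List;

    class AsListSketch {
        static void demo() {
            String[] buckets = { "a", "b" };
            List<String> view = Arrays.asList(buckets); // no copy; backed by the array itself
            buckets[0] = "changed";
            assert view.get(0).equals("changed");       // writes to the array show through the view

            List<String> copy = List.of("a", "b");      // independent copy; List.of also rejects nulls
            assert copy.get(0).equals("a");
        }
    }
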
owningBucketOrd, LeafBucke } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Collect the list of buckets, populate the filter with terms * that are too frequent, and figure out how to merge sub-buckets. */ - StringRareTerms.Bucket[][] rarestPerOrd = new StringRareTerms.Bucket[owningBucketOrds.length][]; - SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length]; - long keepCount = 0; - long[] mergeMap = new long[(int) bucketOrds.size()]; - Arrays.fill(mergeMap, -1); - long offset = 0; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) { - filters[owningOrdIdx] = newFilter(); - List builtBuckets = new ArrayList<>(); - BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]); - BytesRef scratch = new BytesRef(); - while (collectedBuckets.next()) { - collectedBuckets.readValue(scratch); - long docCount = bucketDocCount(collectedBuckets.ord()); - // if the key is below threshold, reinsert into the new ords - if (docCount <= maxDocCount) { - StringRareTerms.Bucket bucket = new StringRareTerms.Bucket(BytesRef.deepCopyOf(scratch), docCount, null, format); - bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch); - mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd; - builtBuckets.add(bucket); - keepCount++; - } else { - filters[owningOrdIdx].add(scratch); + try ( + ObjectArray rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()); + ObjectArray filters = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) { + mergeMap.fill(0, mergeMap.size(), -1); + long keepCount = 0; + long offset = 0; + for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) { + try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) { + filters.set(owningOrdIdx, newFilter()); + List builtBuckets = new ArrayList<>(); + BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx)); + BytesRef scratch = new BytesRef(); + while (collectedBuckets.next()) { + collectedBuckets.readValue(scratch); + long docCount = bucketDocCount(collectedBuckets.ord()); + // if the key is below threshold, reinsert into the new ords + if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); + StringRareTerms.Bucket bucket = new StringRareTerms.Bucket( + BytesRef.deepCopyOf(scratch), + docCount, + null, + format + ); + bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch); + mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); + builtBuckets.add(bucket); + keepCount++; + } else { + filters.get(owningOrdIdx).add(scratch); + } + } + rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(StringRareTerms.Bucket[]::new)); + offset += bucketsInThisOwningBucketToCollect.size(); } } - rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(StringRareTerms.Bucket[]::new); - offset += bucketsInThisOwningBucketToCollect.size(); - } - } - /* - * Only merge/delete the ordinals if we have actually deleted one, - * to save on some redundant work. 
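
Aside: note the BytesRef.deepCopyOf(scratch) retained through the rewrite above. The ordinals enumeration reuses a single scratch BytesRef for every key it reads, so any bucket that outlives the loop must copy the bytes it keeps:

    import org.apache.lucene.util.BytesRef;

    class DeepCopySketch {
        // Simulates an enumeration that overwrites one scratch buffer per value.
        static BytesRef[] collectKeys(byte[][] values) {
            BytesRef scratch = new BytesRef();
            BytesRef[] kept = new BytesRef[values.length];
            for (int i = 0; i < values.length; i++) {
                scratch.bytes = values[i];
                scratch.offset = 0;
                scratch.length = values[i].length;
                kept[i] = BytesRef.deepCopyOf(scratch); // without the copy, every entry would alias the last value
            }
            return kept;
        }
    }
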
- */ - if (keepCount != mergeMap.length) { - LongUnaryOperator howToMerge = b -> mergeMap[(int) b]; - rewriteBuckets(offset, howToMerge); - if (deferringCollector() != null) { - ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + /* + * Only merge/delete the ordinals if we have actually deleted one, + * to save on some redundant work. + */ + if (keepCount != mergeMap.size()) { + LongUnaryOperator howToMerge = mergeMap::get; + rewriteBuckets(offset, howToMerge); + if (deferringCollector() != null) { + ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + } + } } - } - /* - * Now build the results! - */ - buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator()); - result[ordIdx] = new StringRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(rarestPerOrd[ordIdx]), - maxDocCount, - filters[ordIdx] - ); + /* + * Now build the results! + */ + buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + + return StringRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + StringRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); + Arrays.sort(buckets, ORDER.comparator()); + return new StringRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 7755f1db6a3ee..8047d1f06990f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -111,7 +111,7 @@ public boolean canLeadReduction() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, 0L, 0, Collections.emptyList()); + return doXContentCommon(builder, params, false, 0L, 0, Collections.emptyList()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java index 8742136c86ec6..cf65f1ff7c835 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -36,11 +37,7 @@ protected MetricsAggregator(String name, AggregationContext context, Aggregator public abstract InternalAggregation buildAggregation(long owningBucketOrd) throws IOException; @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for 
(int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildAggregation(owningBucketOrds[ordIdx]); - } - return results; + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildAggregation(owningBucketOrds.get(ordIdx))); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 6bc667d4359b1..546586a9ff3c3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -191,7 +191,16 @@ protected SearchHit nextDoc(int doc) throws IOException { } }; - SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad); + SearchHit[] hits = docsIterator.iterate( + context.shardTarget(), + context.searcher().getIndexReader(), + docIdsToLoad, + context.request().allowPartialSearchResults() + ); + + if (docsIterator.isTimedOut()) { + context.queryResult().searchTimedOut(true); + } if (context.isCancelled()) { for (SearchHit hit : hits) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index 682ee4b375668..df4e7649ffd3b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -13,7 +13,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.query.SearchTimeoutException; import java.io.IOException; import java.util.Arrays; @@ -27,6 +30,12 @@ */ abstract class FetchPhaseDocsIterator { + private boolean timedOut = false; + + public boolean isTimedOut() { + return timedOut; + } + /** * Called when a new leaf reader is reached * @param ctx the leaf reader for this set of doc ids @@ -44,7 +53,7 @@ abstract class FetchPhaseDocsIterator { /** * Iterate over a set of docsIds within a particular shard and index reader */ - public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds) { + public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds, boolean allowPartialResults) { SearchHit[] searchHits = new SearchHit[docIds.length]; DocIdToIndex[] docs = new DocIdToIndex[docIds.length]; for (int index = 0; index < docIds.length; index++) { @@ -58,30 +67,55 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde LeafReaderContext ctx = indexReader.leaves().get(leafOrd); int endReaderIdx = endReaderIdx(ctx, 0, docs); int[] docsInLeaf = docIdsInLeaf(0, endReaderIdx, docs, ctx.docBase); - setNextReader(ctx, docsInLeaf); - for (int i = 0; i < docs.length; i++) { - if (i >= endReaderIdx) { - leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves()); - ctx = indexReader.leaves().get(leafOrd); - endReaderIdx = endReaderIdx(ctx, i, docs); - docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase); - setNextReader(ctx, docsInLeaf); + try { + setNextReader(ctx, docsInLeaf); + } 
catch (ContextIndexSearcher.TimeExceededException timeExceededException) { + if (allowPartialResults) { + timedOut = true; + return SearchHits.EMPTY; } - currentDoc = docs[i].docId; - assert searchHits[docs[i].index] == null; - searchHits[docs[i].index] = nextDoc(docs[i].docId); + throw new SearchTimeoutException(shardTarget, "Time exceeded"); } - } catch (Exception e) { - for (SearchHit searchHit : searchHits) { - if (searchHit != null) { - searchHit.decRef(); + for (int i = 0; i < docs.length; i++) { + try { + if (i >= endReaderIdx) { + leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves()); + ctx = indexReader.leaves().get(leafOrd); + endReaderIdx = endReaderIdx(ctx, i, docs); + docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase); + setNextReader(ctx, docsInLeaf); + } + currentDoc = docs[i].docId; + assert searchHits[docs[i].index] == null; + searchHits[docs[i].index] = nextDoc(docs[i].docId); + } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { + if (allowPartialResults) { + timedOut = true; + SearchHit[] partialSearchHits = new SearchHit[i]; + System.arraycopy(searchHits, 0, partialSearchHits, 0, i); + return partialSearchHits; + } + purgeSearchHits(searchHits); + throw new SearchTimeoutException(shardTarget, "Time exceeded"); } } + } catch (SearchTimeoutException e) { + throw e; + } catch (Exception e) { + purgeSearchHits(searchHits); throw new FetchPhaseExecutionException(shardTarget, "Error running fetch phase for doc [" + currentDoc + "]", e); } return searchHits; } + private static void purgeSearchHits(SearchHit[] searchHits) { + for (SearchHit searchHit : searchHits) { + if (searchHit != null) { + searchHit.decRef(); + } + } + } + private static int endReaderIdx(LeafReaderContext currentReaderContext, int index, DocIdToIndex[] docs) { int firstInNextReader = currentReaderContext.docBase + currentReaderContext.reader().maxDoc(); int i = index + 1; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e14177adba467..a8db0f26d2966 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -64,7 +64,6 @@ public abstract class AbstractHighlighterBuilder> BiFunction {}, FORCE_SOURCE_FIELD); // force_source is ignored parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD); parser.declareInt(HB::maxAnalyzedOffset, MAX_ANALYZED_OFFSET_FIELD); parser.declareObject(HB::options, (XContentParser p, Void c) -> { diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java index fff1990c29750..90e84acc7cad5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.profile.aggregation; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -68,7 +69,7 @@ 
public BucketComparator bucketComparator(String key, SortOrder order) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { Timer timer = profileBreakdown.getNewTimer(AggregationTimingType.BUILD_AGGREGATION); InternalAggregation[] result; timer.start(); diff --git a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java index 7bf172388eccd..0db3de9abdb7b 100644 --- a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java +++ b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; import java.io.IOException; @@ -22,7 +21,7 @@ * A specialized, bytes only request, that can potentially be optimized on the network * layer, specifically for the same large buffer send to several nodes. */ -public class BytesTransportRequest extends TransportRequest implements RefCounted { +public class BytesTransportRequest extends TransportRequest { final ReleasableBytesReference bytes; private final TransportVersion version; diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 37b9b5836ca5f..3955fc87bf392 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -11,10 +11,8 @@ org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures org.elasticsearch.action.bulk.BulkFeatures org.elasticsearch.features.FeatureInfrastructureFeatures org.elasticsearch.health.HealthFeatures -org.elasticsearch.cluster.service.TransportFeatures org.elasticsearch.cluster.metadata.MetadataFeatures org.elasticsearch.rest.RestFeatures -org.elasticsearch.indices.IndicesFeatures org.elasticsearch.repositories.RepositoriesFeatures org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ba575cc642a81..6191922f13094 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8702003 8.15.4,8702003 8.16.0,8772001 +8.16.1,8772004 diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index f9a8237d63717..69aa5102dec8d 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -44,3 +44,4 @@ FORMING_SINGLE_NODE_CLUSTERS modules-discover CIRCUIT_BREAKER_ERRORS circuit-breaker-errors.html ALLOCATION_EXPLAIN_NO_COPIES cluster-allocation-explain.html#no-valid-shard-copy ALLOCATION_EXPLAIN_MAX_RETRY 
cluster-allocation-explain.html#maximum-number-of-retries-exceeded +SECURE_SETTINGS secure-settings.html diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index c54aea88613f5..f84d69af727ac 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8512000 8.15.4,8512000 8.16.0,8518000 +8.16.1,8518000 diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 9863d2156422d..f5a23cf68a26e 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -509,12 +510,12 @@ public void testGetDetailedMessage() { public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("test"); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ {"type":"exception","reason":"test"}"""); } { ElasticsearchException e = new IndexShardRecoveringException(new ShardId("_test", "_0", 5)); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "index_shard_recovering_exception", "reason": "CurrentState[RECOVERING] Already recovering", @@ -529,7 +530,7 @@ public void testToXContent() throws IOException { "foo", new IllegalStateException("bar") ); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "illegal_state_exception", "reason": "bar" @@ -537,7 +538,7 @@ public void testToXContent() throws IOException { } { ElasticsearchException e = new ElasticsearchException(new IllegalArgumentException("foo")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": "java.lang.IllegalArgumentException: foo", @@ -552,7 +553,7 @@ public void testToXContent() throws IOException { "foo", new ElasticsearchException("bar", new IllegalArgumentException("index is closed", new RuntimeException("foobar"))) ); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ { "type": "exception", "reason": "foo", @@ -573,7 +574,7 @@ public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("foo", new IllegalStateException("bar")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": "foo", @@ -602,21 +603,91 @@ public void testToXContent() throws IOException { } } + public void testGenerateFailureToXContentWithNoDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found"}}""", false); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar"}}""", false); + } + + { // header and metadata shouldn't be rendered + 
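+ // with detailed=false, generateFailureXContent renders only type and reason,
+ // so the headers and es.* metadata attached below must not show up in the JSON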
ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expected = """ + {"error":{"type": "parsing_exception","reason": "foobar"}}"""; + assertFailureAsJson(ex, expected, false); + } + } + + public void testGenerateFailureToXContentWithDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found", + "root_cause":[{"type":"file_not_found_exception","reason":"foo not found"}]}}""", true); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar","line":1,"col":2, + "root_cause":[{"type":"parsing_exception","reason":"foobar","line":1,"col":2}]}}""", true); + } + + { // render header and metadata + ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expectedFragment = """ + { + "type": "parsing_exception", + "reason": "foobar", + "line": 1, + "col": 2, + "test1": "value1", + "test2": "value2", + "header": { + "test_multi": [ + "some value", + "another value" + ], + "test": "some value" + } + """; + String expected = "{\"error\":" + expectedFragment + ",\"root_cause\":[" + expectedFragment + "}]}}"; + assertFailureAsJson(ex, expected, true); + } + } + public void testGenerateThrowableToXContent() throws IOException { { - Exception ex; - if (randomBoolean()) { - // just a wrapper which is omitted - ex = new RemoteTransportException("foobar", new FileNotFoundException("foo not found")); - } else { - ex = new FileNotFoundException("foo not found"); + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); } - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"file_not_found_exception","reason":"foo not found"}"""); } { ParsingException ex = new ParsingException(1, 2, "foobar", null); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"parsing_exception","reason":"foobar","line":1,"col":2}"""); } @@ -656,7 +727,7 @@ public void testGenerateThrowableToXContent() throws IOException { "test": "some value" } }"""; - assertExceptionAsJson(ex, expected); + assertThrowableAsJson(ex, expected); } } @@ -697,7 +768,7 @@ public void testToXContentWithHeadersAndMetadata() throws IOException { } }"""; - assertExceptionAsJson(e, expectedJson); + assertThrowableAsJson(e, expectedJson); ElasticsearchException parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), expectedJson)) { @@ -859,7 +930,7 @@ public void testFromXContentWithHeadersAndMetadata() throws IOException { } assertNotNull(parsed); - assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]"); + assertEquals("Elasticsearch exception [type=exception, reason=foo]", parsed.getMessage()); assertThat(parsed.getHeaderKeys(), hasSize(1)); assertThat(parsed.getHeader("foo_1"), hasItem("foo1")); assertThat(parsed.getMetadataKeys(), 
hasSize(1)); @@ -996,11 +1067,40 @@ public void testThrowableToAndFromXContent() throws IOException { public void testUnknownFailureToAndFromXContent() throws IOException { final XContent xContent = randomFrom(XContentType.values()).xContent(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - // Prints a null failure using generateFailureXContent() - ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + + // Failure was null, expecting a "unknown" reason + assertEquals("Elasticsearch exception [type=unknown, reason=unknown]", parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + } + + public void testUnknownFailureToAndFromXContentV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); ElasticsearchException parsedFailure; try (XContentParser parser = createParser(xContent, failureBytes)) { @@ -1021,10 +1121,46 @@ public void testFailureToAndFromXContentWithNoDetails() throws IOException { final XContent xContent = randomFrom(XContentType.values()).xContent(); final Exception failure = (Exception) randomExceptions().v1(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, failure, false); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + try (XContentParser parser = createParser(xContent, failureBytes)) { + failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); + } + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + assertNotNull(parsedFailure); + + String type = ElasticsearchException.getExceptionName(failure); + String reason = failure.getMessage(); + 
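+ // the round-tripped failure keeps only type and reason; cause, headers and
+ // metadata are dropped when details are disabled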
assertEquals(ElasticsearchException.buildMessage(type, reason, null), parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + assertNull(parsedFailure.getCause()); + } + + public void testFailureToAndFromXContentWithNoDetailsV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + final Exception failure = (Exception) randomExceptions().v1(); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1165,10 +1301,12 @@ public void testFailureToAndFromXContentWithDetails() throws IOException { } Exception finalFailure = failure; - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1197,13 +1335,20 @@ private static void assertToXContentAsJson(ToXContent e, String expectedJson) th assertToXContentEquivalent(new BytesArray(expectedJson), actual, XContentType.JSON); } - private static void assertExceptionAsJson(Exception e, String expectedJson) throws IOException { + private static void assertThrowableAsJson(Throwable e, String expectedJson) throws IOException { assertToXContentAsJson((builder, params) -> { ElasticsearchException.generateThrowableXContent(builder, params, e); return builder; }, expectedJson); } + private static void assertFailureAsJson(Exception e, String expectedJson, boolean detailed) throws IOException { + assertToXContentAsJson( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, e, detailed), + expectedJson + ); + } + public static void assertDeepEquals(ElasticsearchException expected, ElasticsearchException actual) { do { if (expected == null) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 7a31f0dcb4631..a7058e5d6cd8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1057,6 +1057,8 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java index e9188d9cb8f0d..a72630c327ea2 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.cluster.stats.CCSTelemetrySnapshot.PerClusterCCSTelemetry; import org.elasticsearch.action.admin.cluster.stats.LongMetric.LongMetricValue; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; @@ -32,9 +33,13 @@ public class CCSTelemetrySnapshotTests extends AbstractWireSerializingTestCase { private LongMetricValue randomLongMetricValue() { + return randomLongMetricValueBetween(0, 1_000_000); + } + + private LongMetricValue randomLongMetricValueBetween(int low, int high) { LongMetric v = new LongMetric(); for (int i = 0; i < randomIntBetween(5, 10); i++) { - v.record(randomIntBetween(0, 1_000_000)); + v.record(randomIntBetween(low, high)); } return v.getValue(); } @@ -330,4 +335,21 @@ private String readJSONFromResource(String fileName) throws IOException { return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8); } } + + public void testRanges() throws IOException { + var value1 = randomLongMetricValueBetween(1_000_000, 10_000_000); + var count1 = value1.count(); + var max1 = value1.max(); + var output = new BytesStreamOutput(); + value1.writeTo(output); + var value1Read = LongMetricValue.fromStream(output.bytes().streamInput()); + var value2 = randomLongMetricValueBetween(0, 100); + var count2 = value2.count(); + output = new BytesStreamOutput(); + value2.writeTo(output); + var value2Read = LongMetricValue.fromStream(output.bytes().streamInput()); + value2Read.add(value1Read); + assertThat(value2Read.count(), equalTo(count1 + count2)); + assertThat(value2Read.max(), equalTo(max1)); + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 0b9cba837583d..5cf7b438b41ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -42,6 +42,7 @@ import java.io.IOException; import java.io.Reader; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -250,6 +251,32 @@ public void testFillsAttributes() throws IOException { assertEquals("", tokens.get(3).getType()); } + public void testAnalyzerWithTwoTextsAndNoIndexName() throws IOException { + AnalyzeAction.Request request = new AnalyzeAction.Request(); + + for (String analyzer : Arrays.asList("standard", "simple", "stop", "keyword", "whitespace", "classic")) { + request.analyzer(analyzer); + request.text("a a", "b b"); + + AnalyzeAction.Response analyzeIndex = TransportAnalyzeAction.analyze(request, registry, mockIndexService(), maxTokenCount); + List tokensIndex = analyzeIndex.getTokens(); + + AnalyzeAction.Response analyzeNoIndex = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); + List tokensNoIndex = analyzeNoIndex.getTokens(); + + assertEquals(tokensIndex.size(), tokensNoIndex.size()); + for (int i = 0; i < tokensIndex.size(); i++) { + AnalyzeAction.AnalyzeToken withIndex = tokensIndex.get(i); + AnalyzeAction.AnalyzeToken withNoIndex 
= tokensNoIndex.get(i); + + assertEquals(withIndex.getStartOffset(), withNoIndex.getStartOffset()); + assertEquals(withIndex.getEndOffset(), withNoIndex.getEndOffset()); + assertEquals(withIndex.getPosition(), withNoIndex.getPosition()); + assertEquals(withIndex.getType(), withNoIndex.getType()); + } + } + } + public void testWithIndexAnalyzers() throws IOException { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java new file mode 100644 index 0000000000000..fe839c28aab88 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamResponseTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamResponse::new; + } + + @Override + protected ReindexDataStreamResponse createTestInstance() { + return new ReindexDataStreamResponse(randomAlphaOfLength(40)); + } + + @Override + protected ReindexDataStreamResponse mutateInstance(ReindexDataStreamResponse instance) { + return createTestInstance(); + } + + public void testToXContent() throws IOException { + ReindexDataStreamResponse response = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), equalTo(Map.of("task", response.getTaskId()))); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java index 4e6b2b17b2554..61284a49b2502 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java @@ -79,7 +79,7 @@ public void testXContentDeserialization() throws IOException { assertEquals(actualPipelines.size(), parsedPipelines.size()); for (PipelineConfiguration pipeline : parsedPipelines) { 
assertTrue(pipelinesMap.containsKey(pipeline.getId())); - assertEquals(pipelinesMap.get(pipeline.getId()).getConfigAsMap(), pipeline.getConfigAsMap()); + assertEquals(pipelinesMap.get(pipeline.getId()).getConfig(), pipeline.getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 0bc5c69d8ad4b..dc1698e3459ec 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -134,7 +134,12 @@ public void setup() { ); fileSettingsService = spy( - new FileSettingsService(clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY)) + new FileSettingsService( + clusterService, + mock(ReservedClusterStateService.class), + newEnvironment(Settings.EMPTY), + new FileSettingsService.FileSettingsHealthIndicatorService() + ) ); } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index f8ecdbd062054..725a4583d104a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -101,7 +101,7 @@ protected SearchPhase getNextPhase() { @Override protected void executePhaseOnShard( final SearchShardIterator shardIt, - final SearchShardTarget shard, + final Transport.Connection shard, final SearchActionListener listener ) {} diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 762a7e0f47cab..dda20dfb37e9d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -8,35 +8,65 @@ */ package org.elasticsearch.action.search; +import org.apache.lucene.document.Document; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; +import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.mapper.IdLoader; +import org.elasticsearch.index.mapper.MapperMetrics; +import 
org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.transport.Transport; +import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -749,4 +779,159 @@ private static void addProfiling(boolean profiled, QuerySearchResult queryResult private static ProfileResult fetchProfile(boolean profiled) { return profiled ? 
new ProfileResult("fetch", "fetch", Map.of(), Map.of(), FETCH_PROFILE_TIME, List.of()) : null; } + + public void testFetchTimeoutWithPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, true)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null); + assertTrue(searchContext.queryResult().searchTimedOut()); + assertEquals(1, searchContext.fetchResult().hits().getHits().length); + } finally { + r.close(); + dir.close(); + } + } + + public void testFetchTimeoutNoPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, false)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + expectThrows(SearchTimeoutException.class, () -> fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null)); + assertNull(searchContext.fetchResult().hits()); + } finally { + r.close(); + dir.close(); + } + } + + private static ContextIndexSearcher createSearcher(IndexReader reader) throws IOException { + return new ContextIndexSearcher(reader, null, null, new QueryCachingPolicy() { + @Override + public void onUse(Query query) {} + + @Override + public boolean shouldCache(Query query) { + return false; + } + }, randomBoolean()); + } + + private static FetchPhase createFetchPhase(ContextIndexSearcher contextIndexSearcher) { + return new FetchPhase(Collections.singletonList(fetchContext -> new FetchSubPhaseProcessor() { + boolean processCalledOnce = false; + + @Override + public void setNextReader(LeafReaderContext readerContext) {} + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + // we throw only once one doc has been fetched, so we can test partial results are returned + if (processCalledOnce) { + contextIndexSearcher.throwTimeExceededException(); + } else { + processCalledOnce = true; + } + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + })); + } + + private static SearchContext createSearchContext(ContextIndexSearcher contextIndexSearcher, boolean allowPartialResults) { + IndexSettings indexSettings = new IndexSettings( + IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + Settings.EMPTY + ); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) { + + } + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) { + + } + }); + + SearchExecutionContext searchExecutionContext = new SearchExecutionContext( + 0, + 0, + indexSettings, + bitsetFilterCache, + null, + null, + 
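+ // only a minimal SearchExecutionContext is needed here; collaborators the
+ // fetch-timeout tests never touch are left null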
MappingLookup.EMPTY, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + Collections.emptyMap(), + null, + MapperMetrics.NOOP + ); + TestSearchContext searchContext = new TestSearchContext(searchExecutionContext, null, contextIndexSearcher) { + private final FetchSearchResult fetchSearchResult = new FetchSearchResult(); + private final ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + new SearchRequest().allowPartialSearchResults(allowPartialResults), + new ShardId("index", "indexUUID", 0), + 0, + 1, + AliasFilter.EMPTY, + 1f, + 0L, + null + ); + + @Override + public IdLoader newIdLoader() { + return new IdLoader.StoredIdLoader(); + } + + @Override + public FetchSearchResult fetchResult() { + return fetchSearchResult; + } + + @Override + public ShardSearchRequest request() { + return request; + } + }; + searchContext.addReleasable(searchContext.fetchResult()::decRef); + searchContext.setTask(new SearchShardTask(-1, "type", "action", "description", null, Collections.emptyMap())); + return searchContext; + } } diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 03c5d0a06f6fb..484b3c6b386fd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -147,7 +147,7 @@ public void executeNextPhase(SearchPhase currentPhase, Supplier nex @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection shard, SearchActionListener listener ) { onShardResult(new SearchPhaseResult() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index f655136cd4ba4..b4ddd48172d01 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; @@ -119,16 +118,15 @@ public void testSkipSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per replica return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -227,23 +225,22 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - 
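+ // the async action now hands the resolved node connection straight to
+ // executePhaseOnShard, so tests read the shard id from the iterator instead
+ // of resolving a connection via getConnection(shard.getNodeId())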
seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { safeAwait(awaitInitialRequests); - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() ); try { - if (shardFailures[shard.getShardId().id()]) { + if (shardFailures[shardIt.shardId().id()]) { listener.onFailure(new RuntimeException()); } else { listener.onResponse(testSearchPhaseResult); @@ -340,11 +337,11 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", testResponse.queried.add(shardId)); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -464,13 +461,13 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", response.queried.add(shardId)); final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { + if (shardId.id() == 0) { testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); } else { testSearchPhaseResult = new TestSearchPhaseResult( @@ -573,15 +570,14 @@ public void testAllowPartialResults() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -673,7 +669,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { assert false : "Expected to skip all shards"; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 51796f404c283..d54ac9c66d9a5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -639,7 +639,6 @@ public void testMergeAggs() throws InterruptedException { 10000D, count, InternalAggregations.EMPTY, - false, DocValueFormat.RAW ); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); @@ -1498,15 +1497,7 @@ private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Inde private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) { Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); InternalDateRange.Factory factory = new InternalDateRange.Factory(); - InternalDateRange.Bucket bucket = factory.createBucket( - "bucket", - 0D, - 10000D, - count, - InternalAggregations.EMPTY, - false, - DocValueFormat.RAW - ); + InternalDateRange.Bucket bucket = factory.createBucket("bucket", 0D, 10000D, count, InternalAggregations.EMPTY, DocValueFormat.RAW); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java index a1b9c59571496..303b75098ab67 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "testSet", "synonymRuleId", "testRule")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java index 4dce73fcf0e89..915c338195c86 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "test")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 4518bd655346a..226f5dbf3b2ff 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -22,10 +22,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistryTests; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -38,6 +42,7 @@ import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TestTransportChannel; @@ -49,6 +54,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -155,6 +161,7 @@ public void doRun() { final var joinValidationService = new JoinValidationService( settings, transportService, + writableRegistry(), () -> usually() ? 
clusterState : null, clusterState::metadata, List.of() @@ -286,7 +293,14 @@ public void writeTo(StreamOutput out) {} ); // registers request handler - new JoinValidationService(Settings.EMPTY, joiningNodeTransportService, () -> clusterState, clusterState::metadata, List.of()); + new JoinValidationService( + Settings.EMPTY, + joiningNodeTransportService, + writableRegistry(), + () -> clusterState, + clusterState::metadata, + List.of() + ); joiningNodeTransportService.start(); joiningNodeTransportService.acceptIncomingRequests(); @@ -325,6 +339,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req final var joinValidationService = new JoinValidationService( Settings.EMPTY, masterTransportService, + writableRegistry(), () -> clusterState, clusterState::metadata, List.of() @@ -349,7 +364,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req } } - public void testJoinValidationRejectsMismatchedClusterUUID() { + public void testJoinValidationRejectsMismatchedClusterUUID() throws IOException { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); final var localNode = DiscoveryNodeUtils.create("node0"); @@ -371,7 +386,14 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { final var settings = Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), dataPath).build(); // registers request handler - new JoinValidationService(settings, transportService, () -> localClusterState, localClusterState::metadata, List.of()); + new JoinValidationService( + settings, + transportService, + writableRegistry(), + () -> localClusterState, + localClusterState::metadata, + List.of() + ); transportService.start(); transportService.acceptIncomingRequests(); @@ -384,7 +406,7 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(otherClusterState), + serializeClusterState(otherClusterState), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -401,6 +423,22 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { ); } + private static BytesTransportRequest serializeClusterState(ClusterState clusterState) { + try ( + var bytesStream = new BytesStreamOutput(); + var compressedStream = new OutputStreamStreamOutput( + CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream)) + ) + ) { + compressedStream.setTransportVersion(TransportVersion.current()); + clusterState.writeTo(compressedStream); + compressedStream.flush(); + return new BytesTransportRequest(ReleasableBytesReference.wrap(bytesStream.bytes()), TransportVersion.current()); + } catch (Exception e) { + throw new AssertionError(e); + } + } + public void testJoinValidationRunsJoinValidators() { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); @@ -420,11 +458,12 @@ public void testJoinValidationRunsJoinValidators() { new JoinValidationService( Settings.EMPTY, transportService, + writableRegistry(), () -> localClusterState, localClusterState::metadata, List.of((node, state) -> { assertSame(node, localNode); - assertSame(state, stateForValidation); + assertEquals(state.stateUUID(), stateForValidation.stateUUID()); throw new IllegalStateException("simulated validation failure"); }) 
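+ // the join validator now sees a copy deserialized from the wire rather than
+ // the original object, so the test compares stateUUIDs instead of identity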
); // registers request handler @@ -435,7 +474,7 @@ public void testJoinValidationRunsJoinValidators() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(stateForValidation), + serializeClusterState(stateForValidation), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -467,9 +506,16 @@ protected void onSendRequest(long requestId, String action, TransportRequest req null, Collections.emptySet() ); - final var joinValidationService = new JoinValidationService(Settings.EMPTY, masterTransportService, () -> null, () -> { - throw new AssertionError("should not be called"); - }, List.of()); + final var joinValidationService = new JoinValidationService( + Settings.EMPTY, + masterTransportService, + writableRegistry(), + () -> null, + () -> { + throw new AssertionError("should not be called"); + }, + List.of() + ); masterTransportService.start(); masterTransportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java deleted file mode 100644 index 00cfac7248da6..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.features; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse; -import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesTask; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesUpdater; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.node.VersionInformation; -import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; -import org.elasticsearch.cluster.service.MasterServiceTaskQueue; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.mockito.ArgumentCaptor; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; - -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class NodeFeaturesFixupListenerTests extends ESTestCase { - - @SuppressWarnings("unchecked") - private static MasterServiceTaskQueue newMockTaskQueue() { - return mock(MasterServiceTaskQueue.class); - } - - private static DiscoveryNodes nodes(Version... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i])); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - private static DiscoveryNodes nodes(VersionInformation... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add( - DiscoveryNodeUtils.builder("node" + i) - .address(new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i)) - .version(versions[i]) - .build() - ); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - @SafeVarargs - private static Map> features(Set... 
nodeFeatures) { - Map<String, Set<String>> features = new HashMap<>(); - for (int i = 0; i < nodeFeatures.length; i++) { - features.put("node" + i, nodeFeatures[i]); - } - return features; - } - - private static NodesFeaturesResponse getResponse(Map<String, Set<String>> responseData) { - return new NodesFeaturesResponse( - ClusterName.DEFAULT, - responseData.entrySet() - .stream() - .map( - e -> new NodeFeatures( - e.getValue(), - DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)) - ) - ) - .toList(), - List.of() - ); - } - - public void testNothingDoneWhenNothingToFix() { - MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(Set.of("f1", "f2"), Set.of("f1", "f2"))) - .build(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testFeaturesFixedAfterNewMaster() throws Exception { - MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Set<String> features = Set.of("f1", "f2"); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor(); - ArgumentCaptor<NodesFeaturesTask> task = ArgumentCaptor.captor(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - - action.getValue().onResponse(getResponse(Map.of("node1", features, "node2", features))); - verify(taskQueue).submitTask(anyString(), task.capture(), any()); - - ClusterState newState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( - testState, - new NodesFeaturesUpdater(), - List.of(task.getValue()) - ); - - assertThat(newState.clusterFeatures().allNodeFeatures(), containsInAnyOrder("f1", "f2")); - } - - public void testFeaturesFetchedOnlyForUpdatedNodes() { - MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes( - nodes( - VersionInformation.CURRENT, - VersionInformation.CURRENT, - new VersionInformation(Version.V_8_12_0, IndexVersion.current(), IndexVersion.current()) - ) - ) - .nodeFeatures(features(Set.of("f1", "f2"), Set.of(), Set.of())) - .build(); - - ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1"))), - action.capture() - ); - } - - public void testConcurrentChangesDoNotOverlap() { - 
MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Set<String> features = Set.of("f1", "f2"); - - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - NodeFeaturesFixupListener listeners = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - any() - ); - // don't send back the response yet - - ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, features, Set.of())) - .build(); - // should not send any requests - listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1)); - verifyNoMoreInteractions(client); - } - - public void testFailedRequestsAreRetried() { - MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Scheduler scheduler = mock(Scheduler.class); - Executor executor = mock(Executor.class); - Set<String> features = Set.of("f1", "f2"); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor(); - ArgumentCaptor<Runnable> retry = ArgumentCaptor.forClass(Runnable.class); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, scheduler, executor); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - - action.getValue().onFailure(new RuntimeException("failure")); - verify(scheduler).schedule(retry.capture(), any(), same(executor)); - - // running the retry should cause another call - retry.getValue().run(); - verify(client, times(2)).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - } -} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..57c360dc6a92a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -10,163 +10,90 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import 
org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; import java.util.Locale; +import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class DateMathExpressionResolverTests extends ESTestCase { - private final Context context = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictExpand(), - SystemIndexAccessLevel.NONE - ); + private final long now = randomMillisUpToYear9999(); + private final LongSupplier getTime = () -> now; - private static ZonedDateTime dateFromMillis(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); - } + public void testNoDateMathExpression() { + String expression = randomAlphaOfLength(10); + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); - private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { - DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); - return dateFormatter.format(zonedDateTime); + expression = "*"; + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); } - public void testNormal() throws Exception { - int numIndexExpressions = randomIntBetween(1, 9); - List<String> indexExpressions = new ArrayList<>(numIndexExpressions); - for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); - } - List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(indexExpressions.size())); - for (int i = 0; i < indexExpressions.size(); i++) { - assertThat(result.get(i), equalTo(indexExpressions.get(i))); - } - } + public void testExpression() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testExpression() throws Exception { - List<String> indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", "<logstash-{now}>"); - List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + result = DateMathExpressionResolver.resolveExpression("<.watch_history-{now}>", getTime); + assertThat(result, equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("<logstash-{now}>", getTime); + assertThat(result, equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } public void testExpressionWithWildcardAndExclusions() { - List<String> indexExpressions = Arrays.asList( - "<-before-inner-{now}>", - "-<before-outer-{now}>", - "<wild*card-{now}*>", - "<-after-inner-{now}>", - "-<after-outer-{now}>" - ); - List<String> result = DateMathExpressionResolver.resolve(context, 
indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-<before-outer-{now}>"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-after-outer-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))) - ) - ); - Context noWildcardExpandContext = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictSingleIndexNoExpandForbidClosed(), - SystemIndexAccessLevel.NONE - ); - result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-<before-outer-{now}>"), - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-<after-outer-{now}>") - ) - ); - } + String result = DateMathExpressionResolver.resolveExpression("<-before-inner-{now}>", getTime); + assertThat(result, equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("<wild*card-{now}*>", getTime); + assertThat(result, equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "*")); + + result = DateMathExpressionResolver.resolveExpression("<-after-inner-{now}>", getTime); + assertThat(result, equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testEmpty() throws Exception { - List<String> result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); - assertThat(result.size(), equalTo(0)); } - public void testExpression_Static() throws Exception { - List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + public void testExpression_Static() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-test>", getTime); + assertThat(result, equalTo(".marvel-test")); } - public void testExpression_MultiParts() throws Exception { - List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); - assertThat(result.size(), equalTo(1)); + public void testExpression_MultiParts() { + String result = DateMathExpressionResolver.resolveExpression("<.text1-{now/d}-text2-{now/M}>", getTime); assertThat( - result.get(0), + result, equalTo( ".text1-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "-text2-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1)) + + formatDate("uuuu.MM.dd", dateFromMillis(now).withDayOfMonth(1)) ) ); } - public void testExpression_CustomFormat() throws Exception { - List<String> results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); - assertThat(results.size(), equalTo(1)); - 
assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - } - - public void testExpression_EscapeStatic() throws Exception { - List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + public void testExpression_CustomFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{yyyy.MM.dd}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_EscapeDateFormat() throws Exception { - List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + public void testExpression_EscapeStatic() { + String result = DateMathExpressionResolver.resolveExpression("<.mar\\{v\\}el-{now/d}>", getTime); + assertThat(result, equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_MixedArray() throws Exception { - List<String> result = DateMathExpressionResolver.resolve( - context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") - ); - assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + public void testExpression_EscapeDateFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{'\\{year\\}'yyyy}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(now)))); } - public void testExpression_CustomTimeZoneInIndexName() throws Exception { + public void testExpression_CustomTimeZoneInIndexName() { ZoneId timeZone; int hoursOffset; int minutesOffset = 0; @@ -194,57 +121,57 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { // rounding to today 00:00 now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0); } - Context context = new Context( - this.context.getState(), - this.context.getOptions(), - now.toInstant().toEpochMilli(), - SystemIndexAccessLevel.NONE, - name -> false, - name -> false - ); - List<String> results = DateMathExpressionResolver.resolve( - context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + + String result = DateMathExpressionResolver.resolveExpression( + "<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>", + () -> now.toInstant().toEpochMilli() ); - assertThat(results.size(), equalTo(1)); - logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, result); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } - public void testExpressionInvalidUnescaped() throws Exception { + public void 
testExpressionInvalidUnescaped() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolveExpression("<.mar}vel-{now/d}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); } - public void testExpressionInvalidDateMathFormat() throws Exception { + public void testExpressionInvalidDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - public void testExpressionInvalidEmptyDateMathFormat() throws Exception { + public void testExpressionInvalidEmptyDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); } - public void testExpressionInvalidOpenEnded() throws Exception { + public void testExpressionInvalidOpenEnded() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + static String formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java index 41651d52ceb9f..0e4b8271ceac7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java @@ -185,38 +185,6 @@ public void testNodeCPUsRoundUp() { } } - public void testDesiredNodeHasRangeFloatProcessors() { - final var settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); - - { - final var desiredNode = new DesiredNode( - settings, - new DesiredNode.ProcessorsRange(0.4, 1.2), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode( - settings, - randomIntBetween(0, 10) + randomDoubleBetween(0.00001, 0.99999, true), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - 
assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode(settings, 2.0f, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1)); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(true)); - } - } - public void testEqualsOrProcessorsCloseTo() { final Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); final double maxDelta = 1E-3; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 99470918ce063..30895767c33c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -47,6 +48,7 @@ import java.time.LocalDate; import java.time.ZoneOffset; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,6 +60,8 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFailureStore; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.dateFromMillis; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.formatDate; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; @@ -885,10 +889,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[] {})), - equalTo(newHashSet("kuku", "testXXX")) - ); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context)), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesNoIndicesErrorMessage() { @@ -1408,52 +1409,56 @@ public void testConcreteIndicesWildcardNoMatch() { } } - public void testIsAllIndicesNull() throws Exception { + public void testIsAllIndicesNull() { assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true)); } - public void testIsAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Collections.emptyList()), equalTo(true)); + public void testIsAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true)); + } + + public void testIsAllIndicesExplicitAll() { + 
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true)); } - public void testIsAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all")), equalTo(true)); + public void testIsAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false)); } - public void testIsAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsNoneIndices() { + assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true)); } - public void testIsAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("*")), equalTo(false)); + public void testIsAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false)); } - public void testIsExplicitAllIndicesNull() throws Exception { + public void testIsExplicitAllIndicesNull() { assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false)); } - public void testIsExplicitAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Collections.emptyList()), equalTo(false)); + public void testIsExplicitAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false)); } - public void testIsExplicitAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all")), equalTo(true)); + public void testIsExplicitAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true)); } - public void testIsExplicitAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsExplicitAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false)); } - public void testIsExplicitAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsExplicitAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsExplicitAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("*")), equalTo(false)); + public void testIsExplicitAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false)); } public void testIndexOptionsFailClosedIndicesAndAliases() { @@ -1580,16 +1585,13 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - 
assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("test-0", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") - ); - assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of("test-0", "test-1", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals(Set.of("test-1", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1598,16 +1600,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set<String> resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set<String> resolvedExpressions = Set.of("alias-0", "alias-1"); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "test-1", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1742,7 +1744,7 @@ public void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set<String> resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); + Set<String> resolvedExpressions = Set.of("test-0", "test-alias"); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1771,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1853,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1891,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); 
Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1927,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1968,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -2328,40 +2330,40 @@ public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); } public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); } public void testWildcardSystemIndexResolutionSingleMatchAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); } public void testSingleSystemIndexResolutionAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta")); } public void testFullWildcardSystemIndicesAreHidden() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(randomFrom("*", "_all")); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining("some-other-index")); } public void testFullWildcardSystemIndexResolutionDeprecated() { @@ -2370,8 +2372,8 @@ public void testFullWildcardSystemIndexResolutionDeprecated() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", 
".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); assertWarnings( true, new DeprecationWarning( @@ -2388,8 +2390,8 @@ public void testSingleSystemIndexResolutionDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".ml-meta")); assertWarnings( true, new DeprecationWarning( @@ -2405,8 +2407,8 @@ public void testWildcardSystemIndexResolutionSingleMatchDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); assertWarnings( true, new DeprecationWarning( @@ -2423,8 +2425,8 @@ public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); assertWarnings( true, new DeprecationWarning( @@ -2479,8 +2481,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2496,8 +2498,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-sys-idx"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2515,8 +2517,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2526,8 +2528,8 @@ public void testExternalSystemIndexAccess() { 
threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2538,8 +2540,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-*"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2549,8 +2551,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List<String> indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -3073,7 +3075,6 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); - ; } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; @@ -3239,6 +3240,37 @@ public void testDataStreamsNames() { assertThat(names, empty()); } + public void testDateMathMixedArray() { + long now = System.currentTimeMillis(); + String dataMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)); + String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1)); + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + ClusterState.builder(new ClusterName("_name")) + .metadata( + Metadata.builder() + .put(indexBuilder("name1")) + .put(indexBuilder("name2")) + .put(indexBuilder(dataMathIndex1)) + .put(indexBuilder(dateMathIndex2)) + ) + .build(), + IndicesOptions.strictExpand(), + now, + SystemIndexAccessLevel.NONE, + Predicates.never(), + Predicates.never() + ); + Collection<String> result = IndexNameExpressionResolver.resolveExpressionsToResources( + context, + "name1", + "<.marvel-{now/d}>", + "name2", + "<.logstash-{now/M{uuuu.MM}}>" + ); + assertThat(result.size(), equalTo(4)); + assertThat(result, contains("name1", dataMathIndex1, "name2", dateMathIndex2)); + } + public void testMathExpressionSupport() { Instant instant = LocalDate.of(2021, 01, 11).atStartOfDay().toInstant(ZoneOffset.UTC); String resolved = IndexNameExpressionResolver.resolveDateMathExpression("", instant.toEpochMilli()); @@ -3418,10 +3450,6 @@ private ClusterState systemIndexTestClusterState() { return ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); } - private List<String> resolveConcreteIndexNameList(ClusterState state, SearchRequest request) { - return 
Arrays.stream(indexNameExpressionResolver.concreteIndices(state, request)).map(Index::getName).toList(); - } - private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) { return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..6a26e7948784c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,23 +13,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class WildcardExpressionResolverTests extends ESTestCase { @@ -50,107 +47,31 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*")), + equalTo(newHashSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") - ) - ), - equalTo(newHashSet("testXYY")) - ); - if (indicesOptions == IndicesOptions.lenientExpandOpen()) { - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - equalTo(newHashSet("testXXX", "-testXXX")) - ); - } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(context, "testXXX", "-testXXX") - ); - assertEquals("-testXXX", infe.getIndex().getName()); - } - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) - ); - } - - public void testConvertWildcardsTests() { - Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").putAlias(AliasMetadata.builder("alias1")).putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testXYY").putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testYYY").putAlias(AliasMetadata.builder("alias3"))) - .put(indexBuilder("kuku")); - ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - - IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.lenientExpandOpen(), - SystemIndexAccessLevel.NONE - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); } public void testConvertWildcardsOpenClosedIndicesTests() { Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXXY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXYY").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("testYYY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testYYX").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("kuku").state(IndexMetadata.State.OPEN)); + .put(indexBuilder("testXXX").state(State.OPEN)) + .put(indexBuilder("testXXY").state(State.OPEN)) + .put(indexBuilder("testXYY").state(State.CLOSE)) 
+ .put(indexBuilder("testYYY").state(State.OPEN)) + .put(indexBuilder("testYYX").state(State.CLOSE)) + .put(indexBuilder("kuku").state(State.OPEN)); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -159,7 +80,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -168,7 +89,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -177,26 +98,9 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY")) ); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(true, true, false, false), - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(context, "testX*").size(), equalTo(0)); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(false, true, false, false), - SystemIndexAccessLevel.NONE - ); - IndexNameExpressionResolver.Context finalContext = context; - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(finalContext, "testX*") - ); - assertThat(infe.getIndex().getName(), is("testX*")); } // issue #13334 @@ -217,28 +121,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y")), equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*")), equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*")), equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X")).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X")).size(), equalTo(0) ); } @@ -259,26 +162,6 @@ public void testAll() { newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - IndicesOptions noExpandOptions = IndicesOptions.fromOptions( - randomBoolean(), - true, - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context( - state, - noExpandOptions, - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } public void testAllAliases() { @@ -506,112 +389,47 @@ public void testResolveAliases() { ); { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertEquals(0, indices.size()); } { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesStrictContext, - Collections.singletonList("foo_a*") - ) + Set<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + skipAliasesStrictContext, + "foo_a*" ); - assertEquals("foo_a*", infe.getIndex().getName()); + assertThat(indices, empty()); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesLenientContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(skipAliasesStrictContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } - IndicesOptions noExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions(true, false, false, false, true, false, true, false); - IndexNameExpressionResolver.Context noExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - noExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - noExpandNoAliasesContext, - List.of("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( - false, - true, - false, - false, - true, - false, - true, - false - ); - IndexNameExpressionResolver.Context strictNoExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - strictNoExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(strictNoExpandNoAliasesContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } } public void testResolveDataStreams() { @@ -654,17 +472,14 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data steram - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("bar_*") - ); + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "bar_*"); assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } @@ -691,9 +506,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - 
Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -707,9 +522,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -748,9 +563,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -764,9 +579,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -808,24 +623,17 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + indicesAndAliasesContext, + "*" + ); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*"); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") - ); - assertThat( - iae.getMessage(), - containsString("The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead") - ); } private static IndexMetadata.Builder indexBuilder(String index, boolean hidden) { @@ -838,10 +646,6 @@ private static IndexMetadata.Builder indexBuilder(String index) { } private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { - IndexNotFoundException infe = 
expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) - ); - assertEquals(wildcardExpression, infe.getIndex().getName()); + assertThat(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, wildcardExpression), empty()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 21b30557cafea..e5786b1b3449e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,7 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -27,16 +29,22 @@ public class IndexRoutingTableTests extends ESTestCase { public void testReadyForSearch() { - innerReadyForSearch(false); - innerReadyForSearch(true); + innerReadyForSearch(false, false); + innerReadyForSearch(false, true); + innerReadyForSearch(true, false); + innerReadyForSearch(true, true); } - private void innerReadyForSearch(boolean fastRefresh) { + // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563) + private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) { Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID()); ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS); when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn( Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build() ); + when(clusterState.getMinTransportVersion()).thenReturn( + beforeFastRefreshRCO ? 
TransportVersion.fromId(FAST_REFRESH_RCO_2.id() - 1_00_0) : TransportVersion.current() + ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -55,7 +63,7 @@ private void innerReadyForSearch(boolean fastRefresh) { shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -91,7 +99,7 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -118,8 +126,6 @@ private void innerReadyForSearch(boolean fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available - // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure - // that readyForSearch allows for searching replicas when the index shard is not available. shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -137,8 +143,8 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change + if (fastRefresh && beforeFastRefreshRCO) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index 3e33b03663a5c..5d1b89ac63db0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -57,6 +58,7 @@ import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.TestThreadPool; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Queue; @@ -79,6 +81,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; 
+import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.sameInstance; @@ -918,6 +921,77 @@ public void resetDesiredBalance() { } } + public void testNotReconcileEagerlyForEmptyRoutingTable() { + final var threadPool = new TestThreadPool(getTestName()); + final var clusterService = ClusterServiceUtils.createClusterService(ClusterState.EMPTY_STATE, threadPool); + final var clusterSettings = createBuiltInClusterSettings(); + final var shardsAllocator = createShardsAllocator(); + final var reconciliationTaskSubmitted = new AtomicBoolean(); + final var desiredBalanceShardsAllocator = new DesiredBalanceShardsAllocator( + shardsAllocator, + threadPool, + clusterService, + new DesiredBalanceComputer(clusterSettings, TimeProviderUtils.create(() -> 1L), shardsAllocator) { + @Override + public DesiredBalance compute( + DesiredBalance previousDesiredBalance, + DesiredBalanceInput desiredBalanceInput, + Queue> pendingDesiredBalanceMoves, + Predicate isFresh + ) { + assertThat(previousDesiredBalance, sameInstance(DesiredBalance.INITIAL)); + return new DesiredBalance(desiredBalanceInput.index(), Map.of()); + } + }, + (clusterState, rerouteStrategy) -> null, + TelemetryProvider.NOOP, + EMPTY_NODE_ALLOCATION_STATS + ) { + + private ActionListener lastListener; + + @Override + public void allocate(RoutingAllocation allocation, ActionListener listener) { + lastListener = listener; + super.allocate(allocation, listener); + } + + @Override + protected void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { + fail("should not call reconcile"); + } + + @Override + protected void submitReconcileTask(DesiredBalance desiredBalance) { + assertThat(desiredBalance.lastConvergedIndex(), equalTo(0L)); + reconciliationTaskSubmitted.set(true); + lastListener.onResponse(null); + } + }; + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.INITIAL)); + try { + final PlainActionFuture future = new PlainActionFuture<>(); + desiredBalanceShardsAllocator.allocate( + new RoutingAllocation( + new AllocationDeciders(Collections.emptyList()), + clusterService.state(), + null, + null, + randomNonNegativeLong() + ), + future + ); + safeGet(future); + assertThat(desiredBalanceShardsAllocator.getStats().computationSubmitted(), equalTo(1L)); + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(1L)); + assertThat(reconciliationTaskSubmitted.get(), is(true)); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance().lastConvergedIndex(), equalTo(0L)); + } finally { + clusterService.close(); + terminate(threadPool); + } + } + private static IndexMetadata createIndex(String name) { return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 0)).build(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java deleted file mode 100644 index 9eec8309bbb83..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.TransportVersionsFixupListener.NodeTransportVersionTask; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.indices.SystemIndexDescriptor; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.mockito.ArgumentCaptor; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; - -import static java.util.Map.entry; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class TransportVersionsFixupListenerTests extends ESTestCase { - - private static final Version NEXT_VERSION = Version.V_8_8_1; - private static final TransportVersion NEXT_TRANSPORT_VERSION = TransportVersion.fromId(NEXT_VERSION.id); - - @SuppressWarnings("unchecked") - private static MasterServiceTaskQueue newMockTaskQueue() { - return mock(MasterServiceTaskQueue.class); - } - - private static DiscoveryNodes node(Version... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i])); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - @SafeVarargs - private static Map versions(T... 
versions) { - Map tvs = new HashMap<>(); - for (int i = 0; i < versions.length; i++) { - tvs.put("node" + i, versions[i]); - } - return tvs; - } - - private static NodesInfoResponse getResponse(Map responseData) { - return new NodesInfoResponse( - ClusterName.DEFAULT, - responseData.entrySet() - .stream() - .map( - e -> new NodeInfo( - "", - e.getValue(), - null, - null, - null, - DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)), - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ) - ) - .toList(), - List.of() - ); - } - - public void testNothingFixedWhenNothingToInfer() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnNextVersion() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnPreviousVersion() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_7_0, Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(TransportVersions.V_8_7_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - @SuppressWarnings("unchecked") - public void testVersionsAreFixed() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - 
) - ) - .build(); - - ArgumentCaptor> action = ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor task = ArgumentCaptor.forClass(NodeTransportVersionTask.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - action.getValue() - .onResponse( - getResponse( - Map.ofEntries( - entry("node1", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())), - entry("node2", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())) - ) - ) - ); - verify(taskQueue).submitTask(anyString(), task.capture(), any()); - - assertThat(task.getValue().results().keySet(), equalTo(Set.of("node1", "node2"))); - assertThat(task.getValue().results().values(), everyItem(equalTo(NEXT_TRANSPORT_VERSION))); - } - - public void testConcurrentChangesDoNotOverlap() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo(argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), any()); - // don't send back the response yet - - ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - // should not send any requests - listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1)); - verifyNoMoreInteractions(client); - } - - @SuppressWarnings("unchecked") - public void testFailedRequestsAreRetried() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Scheduler scheduler = mock(Scheduler.class); - Executor executor = mock(Executor.class); - - var compatibilityVersions = new CompatibilityVersions( - TransportVersion.current(), - Map.of(".system-index-1", new SystemIndexDescriptor.MappingsVersion(1, 1234)) - ); - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeIdsToCompatibilityVersions( - Map.ofEntries( - entry("node0", compatibilityVersions), - entry("node1", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())), - entry("node2", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())) - ) - ) - .build(); - - ArgumentCaptor> action = 
ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor retry = ArgumentCaptor.forClass(Runnable.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - scheduler, - executor - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client, times(1)).nodesInfo(any(), action.capture()); - // do response immediately - action.getValue().onFailure(new RuntimeException("failure")); - verify(scheduler).schedule(retry.capture(), any(), same(executor)); - - // running retry should cause another check - retry.getValue().run(); - verify(client, times(2)).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - any() - ); - } -} diff --git a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index ad298e7aa8307..3fd8535cd5c27 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -107,4 +107,10 @@ public void testGetDoubleLE() { Exception e = expectThrows(ArrayIndexOutOfBoundsException.class, () -> ref.getDoubleLE(9)); assertThat(e.getMessage(), equalTo("Index 9 out of bounds for length 9")); } + + public void testCopyBytes() { + var data = randomByteArrayOfLength(between(1024, 1024 * 1024 * 50)); + var copy = BytesReference.copyBytes(new BytesArray(data)); + assertArrayEquals(data, BytesReference.toBytes(copy)); + } } diff --git a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java index e103704c89649..874a6a96313e4 100644 --- a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java +++ b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java @@ -9,15 +9,9 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import java.util.List; import java.util.Map; @@ -30,79 +24,36 @@ public class FeatureServiceTests extends ESTestCase { private static class TestFeatureSpecification implements FeatureSpecification { private final Set features; - private final Map historicalFeatures; - private TestFeatureSpecification(Set features, Map historicalFeatures) { + private TestFeatureSpecification(Set features) { this.features = features; - this.historicalFeatures = historicalFeatures; } @Override public Set getFeatures() { return features; } - - @Override - public Map getHistoricalFeatures() { - return historicalFeatures; - } } public void testFailsDuplicateFeatures() { // these all need to be separate classes to trigger the exception - FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { - }; - FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { - }; - FeatureSpecification hfs1 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), 
Version.V_8_11_0)) { + FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) { }; - FeatureSpecification hfs2 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) { + FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) { }; assertThat( expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, fs2))).getMessage(), containsString("Duplicate feature") ); - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(hfs1, hfs2))).getMessage(), - containsString("Duplicate feature") - ); - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, hfs1))).getMessage(), - containsString("Duplicate feature") - ); - } - - public void testFailsNonHistoricalVersion() { - FeatureSpecification fs = new TestFeatureSpecification( - Set.of(), - Map.of(new NodeFeature("f1"), Version.fromId(FeatureService.CLUSTER_FEATURES_ADDED_VERSION.id + 1)) - ); - - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(), - containsString("not a historical version") - ); - } - - public void testFailsSameRegularAndHistoricalFeature() { - FeatureSpecification fs = new TestFeatureSpecification( - Set.of(new NodeFeature("f1")), - Map.of(new NodeFeature("f1"), Version.V_8_12_0) - ); - - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(), - containsString("cannot be declared as both a regular and historical feature") - ); } public void testGetNodeFeaturesCombinesAllSpecs() { List specs = List.of( - new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), - new TestFeatureSpecification(Set.of(), Map.of()) + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f3"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))), + new TestFeatureSpecification(Set.of()) ); FeatureService service = new FeatureService(specs); @@ -111,10 +62,10 @@ public void testGetNodeFeaturesCombinesAllSpecs() { public void testStateHasFeatures() { List specs = List.of( - new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), - new TestFeatureSpecification(Set.of(), Map.of()) + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f3"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))), + new TestFeatureSpecification(Set.of()) ); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) @@ -130,50 +81,4 @@ public void testStateHasFeatures() { assertFalse(service.clusterHasFeature(state, new NodeFeature("nf2"))); assertFalse(service.clusterHasFeature(state, new NodeFeature("nf3"))); } - - private static ClusterState stateWithMinVersion(Version version) { - DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(); - nodes.add(DiscoveryNodeUtils.builder("node").version(version, IndexVersions.ZERO, 
IndexVersion.current()).build()); - for (int n = randomInt(5); n >= 0; n--) { - nodes.add( - DiscoveryNodeUtils.builder("node" + n) - .version( - VersionUtils.randomVersionBetween(random(), version, Version.CURRENT), - IndexVersions.ZERO, - IndexVersion.current() - ) - .build() - ); - } - - return ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build(); - } - - public void testStateHasHistoricalFeatures() { - NodeFeature v8_11_0 = new NodeFeature("hf_8.11.0"); - NodeFeature v8_10_0 = new NodeFeature("hf_8.10.0"); - NodeFeature v7_17_0 = new NodeFeature("hf_7.17.0"); - List specs = List.of( - new TestFeatureSpecification(Set.of(), Map.of(v8_11_0, Version.V_8_11_0)), - new TestFeatureSpecification(Set.of(), Map.of(v8_10_0, Version.V_8_10_0)), - new TestFeatureSpecification(Set.of(), Map.of(v7_17_0, Version.V_7_17_0)) - ); - - FeatureService service = new FeatureService(specs); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_11_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_11_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_11_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_11_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_10_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v7_17_0)); - } } diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 6713042002fa3..07aa9af3b4030 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -1085,12 +1084,8 @@ static ClusterState createClusterState( Collection nodes, Map> indexNameToNodeIdsMap ) { - Map> features = new HashMap<>(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); - for (DiscoveryNode node : nodes) { - nodesBuilder = nodesBuilder.add(node); - features.put(node.getId(), Set.of(HealthFeatures.SUPPORTS_HEALTH.id())); - } + nodes.forEach(nodesBuilder::add); nodesBuilder.localNodeId(randomFrom(nodes).getId()); nodesBuilder.masterNodeId(randomFrom(nodes).getId()); ClusterBlocks.Builder clusterBlocksBuilder = new ClusterBlocks.Builder(); @@ -1125,7 +1120,6 @@ static ClusterState createClusterState( state.metadata(metadata.generateClusterUuidIfNeeded().build()); state.routingTable(routingTable.build()); 
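// Context note (inferred from this patch's removals, not a line of the patch itself): the disk
// health indicator no longer gates on per-node HealthFeatures, so this cluster-state helper now
// registers plain discovery nodes and only needs blocks, metadata and routing; the nodeFeatures
// map that used to be built here is gone.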
state.blocks(clusterBlocksBuilder); - state.nodeFeatures(features); return state.build(); } diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java index 7a578650b7cbd..15ef2e150761f 100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; @@ -451,11 +450,7 @@ private ClusterState createClusterState( metadata.put(idxMetadata); } - var features = Set.of(HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR.id()); - return ClusterState.builder(clusterState) - .metadata(metadata) - .nodeFeatures(Map.of(dataNode.getId(), features, frozenNode.getId(), features)) - .build(); + return ClusterState.builder(clusterState).metadata(metadata).build(); } private static IndexMetadata.Builder createIndexInDataNode(int shards) { diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 19d92568e6528..fa774c0bcfd12 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -271,7 +271,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th final RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( fakeRequest, - true, + randomBoolean(), RestStatus.BAD_REQUEST ); @@ -361,7 +361,11 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th Map> restHeaders = new HashMap<>(); restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( + fakeRequest, + randomBoolean(), + RestStatus.BAD_REQUEST + ); try ( AbstractHttpServerTransport transport = new AbstractHttpServerTransport( diff --git a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java index 8cd61453a3391..27dc0be673abb 100644 --- a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java +++ b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java @@ -85,11 +85,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git 
a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index b4130120372a1..8da7ada91856d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -37,6 +37,31 @@ public void testMemoryLimitSettingsFallbackToOldSingleLimitSetting() { assertThat(IndexingPressure.MAX_REPLICA_BYTES.get(settings), Matchers.equalTo(ByteSizeValue.ofKb(30))); } + public void testHighAndLowWatermarkSplits() { + IndexingPressure indexingPressure = new IndexingPressure(settings); + + try ( + Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); + Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) + ) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + + try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + } + } + } + public void testHighAndLowWatermarkSettings() { IndexingPressure indexingPressure = new IndexingPressure(settings); diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 77ab665166926..997cb123dbf8e 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -48,7 +48,6 @@ import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.node.DiscoveryNode.STATELESS_ENABLED_SETTING_NAME; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.elasticsearch.index.cache.bitset.BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -253,35 +252,21 @@ public void testShouldLoadRandomAccessFiltersEagerly() { for (var hasIndexRole : values) { for (var loadFiltersEagerly : values) { for (var isStateless : values) { - for (var fastRefresh : values) { - if (isStateless == false && fastRefresh) { - // fast refresh is only relevant for stateless indices - continue; - } - - boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( - bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, fastRefresh) - ); - if (isStateless) { - assertEquals(loadFiltersEagerly && ((hasIndexRole && fastRefresh) || hasIndexRole == false), result); - } else { - 
assertEquals(loadFiltersEagerly, result); - } + boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( + bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly) + ); + if (isStateless) { + assertEquals(loadFiltersEagerly && hasIndexRole == false, result); + } else { + assertEquals(loadFiltersEagerly, result); } } } } } - private IndexSettings bitsetFilterCacheSettings( - boolean isStateless, - boolean hasIndexRole, - boolean loadFiltersEagerly, - boolean fastRefresh - ) { - var indexSettingsBuilder = Settings.builder() - .put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly) - .put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh); + private IndexSettings bitsetFilterCacheSettings(boolean isStateless, boolean hasIndexRole, boolean loadFiltersEagerly) { + var indexSettingsBuilder = Settings.builder().put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly); var nodeSettingsBuilder = Settings.builder() .putList( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 399740e6200e6..d4d0e67ff4141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index df6d9380fd141..d7f33b9cdb3ba 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -247,14 +247,14 @@ public void testSyntheticSourceInTimeSeries() throws IOException { }); DocumentMapper mapper = createTimeSeriesModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSyntheticSourceWithLogsIndexMode() throws IOException { XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); DocumentMapper mapper = createLogsModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSupportsNonDefaultParameterValues() throws IOException { diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java index ef316c5addefa..435baa477e740 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java @@ -18,46 +18,48 @@ import org.elasticsearch.script.field.vectors.MultiDenseVector; import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; +import org.junit.BeforeClass; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Iterator; -import java.util.List; import static org.hamcrest.Matchers.containsString; public class MultiDenseVectorScriptDocValuesTests extends ESTestCase { + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + public void testFloatGetVectorValueAndGetMagnitude() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - for (IndexVersion indexVersion : List.of(IndexVersionUtils.randomCompatibleVersion(random()), IndexVersion.current())) { - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); - BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); - MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( - docValues, - magnitudeValues, - "test", - ElementType.FLOAT, - dims - ); - MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); - for (int i = 0; i < vectors.length; i++) { - field.setNextDocId(i); - assertEquals(vectors[i].length, field.size()); - assertEquals(dims, scriptDocValues.dims()); - Iterator iterator = scriptDocValues.getVectorValues(); - float[] magnitudes = scriptDocValues.getMagnitudes(); - assertEquals(expectedMagnitudes[i].length, magnitudes.length); - for (int j = 0; j < vectors[i].length; j++) { - assertTrue(iterator.hasNext()); - assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); - assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); - } + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + assertEquals(vectors[i].length, field.size()); + assertEquals(dims, scriptDocValues.dims()); + Iterator iterator = scriptDocValues.getVectorValues(); + float[] magnitudes = scriptDocValues.getMagnitudes(); + assertEquals(expectedMagnitudes[i].length, magnitudes.length); + for (int j = 0; j < vectors[i].length; j++) { + assertTrue(iterator.hasNext()); + assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); + assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); } } } @@ -67,7 +69,7 @@ public void testByteGetVectorValueAndGetMagnitude() throws IOException { float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 
3 } }, { { 1, 0, 2 } } }; float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -94,10 +96,9 @@ public void testByteGetVectorValueAndGetMagnitude() throws IOException { public void testFloatMetadataAndIterator() throws IOException { int dims = 3; - IndexVersion indexVersion = IndexVersion.current(); float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.FLOAT), fill(new float[2][dims], ElementType.FLOAT) }; float[][] magnitudes = new float[][] { new float[3], new float[2] }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( @@ -123,10 +124,9 @@ public void testFloatMetadataAndIterator() throws IOException { public void testByteMetadataAndIterator() throws IOException { int dims = 3; - IndexVersion indexVersion = IndexVersion.current(); float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.BYTE), fill(new float[2][dims], ElementType.BYTE) }; float[][] magnitudes = new float[][] { new float[3], new float[2] }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, indexVersion); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -162,7 +162,7 @@ public void testFloatMissingValues() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( docValues, @@ -186,7 +186,7 @@ public void testByteMissingValues() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -210,7 +210,7 @@ public void testFloatGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( docValues, @@ -236,7 +236,7 @@ public void testByteGetFunctionIsNotAccessible() throws IOException { int dims = 3; 
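// Annotation on the helper change exercised throughout this file (not a patch line): the
// wrap(float[][][], ElementType) helper has dropped its IndexVersion parameter and pins the
// encoding to IndexVersion.current() internally (see the final hunk of this file), so callers
// shrink to, e.g.:
//   BinaryDocValues docValues = wrap(vectors, ElementType.BYTE);
// and no per-version encoding path is tested here any longer.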
float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -306,7 +306,7 @@ public long cost() { }; } - public static BinaryDocValues wrap(float[][][] vectors, ElementType elementType, IndexVersion indexVersion) { + public static BinaryDocValues wrap(float[][][] vectors, ElementType elementType) { return new BinaryDocValues() { int idx = -1; int maxIdx = vectors.length; @@ -316,7 +316,7 @@ public BytesRef binaryValue() { if (idx >= maxIdx) { throw new IllegalStateException("max index exceeded"); } - return mockEncodeDenseVector(vectors[idx], elementType, indexVersion); + return mockEncodeDenseVector(vectors[idx], elementType, IndexVersion.current()); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index fdc18264e2299..dc70c44a89128 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index dabc8672733e2..0e8c7e0857251 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -65,11 +65,12 @@ public void testReadStringProperty() { } public void testReadStringPropertyInvalidType() { - try { - ConfigurationUtils.readStringProperty(null, null, config, "arr"); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringProperty(null, null, config, "arr") + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); + } public void testReadBooleanProperty() { @@ -83,11 +84,11 @@ public void testReadNullBooleanProperty() { } public void testReadBooleanPropertyInvalidType() { - try { - ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]")); - } + 
ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]")); } public void testReadStringOrIntProperty() { @@ -98,11 +99,11 @@ public void testReadStringOrIntProperty() { } public void testReadStringOrIntPropertyInvalidType() { - try { - ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); } public void testReadMediaProperty() { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index b62fff2eceb28..8235c66ef976b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -56,8 +56,8 @@ public void testFromXContent() throws IOException { assertEquals(2, custom.getPipelines().size()); assertEquals("1", custom.getPipelines().get("1").getId()); assertEquals("2", custom.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), custom.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), custom.getPipelines().get("2").getConfigAsMap()); + assertEquals(pipeline.getConfig(), custom.getPipelines().get("1").getConfig()); + assertEquals(pipeline2.getConfig(), custom.getPipelines().get("2").getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java index 202c4edb2d0c8..7be6e97762ccf 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java @@ -26,26 +26,57 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; import java.util.function.Predicate; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PipelineConfigurationTests extends AbstractXContentTestCase { + public void testConfigInvariants() { + Map original = Map.of("a", 1); + Map mutable = new HashMap<>(original); + PipelineConfiguration configuration = new PipelineConfiguration("1", mutable); + // the config is equal to the original & mutable map, regardless of how you get a reference to it + assertThat(configuration.getConfig(), equalTo(original)); + assertThat(configuration.getConfig(), equalTo(mutable)); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(true))); + // the config is the same instance as itself when unmodifiable is true + assertThat(configuration.getConfig(), sameInstance(configuration.getConfig())); 
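// Sketch of the contract these assertions pin down (inferred from this test, not a patch line):
// getConfig() and getConfig(true) return one cached unmodifiable view, while each
// getConfig(false) call yields a fresh mutable copy. For example:
//   Map<String, Object> view = configuration.getConfig();      // cached, read-only view
//   Map<String, Object> copy = configuration.getConfig(false); // fresh, mutable copy
//   copy.put("d", 4);  // fine: it is a private copy
//   view.put("d", 4);  // would throw UnsupportedOperationException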
+ assertThat(configuration.getConfig(), sameInstance(configuration.getConfig(true))); + // but it's not the same instance as the original mutable map, nor if unmodifiable is false + assertThat(configuration.getConfig(), not(sameInstance(mutable))); + assertThat(configuration.getConfig(), not(sameInstance(configuration.getConfig(false)))); + + // changing the mutable map doesn't alter the pipeline's configuration + mutable.put("b", 2); + assertThat(configuration.getConfig(), equalTo(original)); + + // the modifiable map can be modified + Map modifiable = configuration.getConfig(false); + modifiable.put("c", 3); // this doesn't throw an exception + assertThat(modifiable.get("c"), equalTo(3)); + // but the next modifiable copy is a new fresh copy, and doesn't reflect those changes + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + } + public void testSerialization() throws IOException { PipelineConfiguration configuration = new PipelineConfiguration( "1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); - + assertThat(configuration.getConfig(), anEmptyMap()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals("{}", serialized.getConfig().utf8ToString()); + assertThat(serialized.getConfig(), anEmptyMap()); } public void testMetaSerialization() throws IOException { @@ -56,13 +87,14 @@ public void testMetaSerialization() throws IOException { new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals(configJson, serialized.getConfig().utf8ToString()); + assertEquals( + XContentHelper.convertToMap(new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), true, XContentType.JSON).v2(), + serialized.getConfig() + ); } public void testParser() throws IOException { @@ -80,9 +112,8 @@ public void testParser() throws IOException { XContentParser xContentParser = xContentType.xContent() .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, bytes.streamInput()); PipelineConfiguration parsed = parser.parse(xContentParser, null); - assertEquals(xContentType.canonical(), parsed.getXContentType()); - assertEquals("{}", XContentHelper.convertToJson(parsed.getConfig(), false, parsed.getXContentType())); - assertEquals("1", parsed.getId()); + assertThat(parsed.getId(), equalTo("1")); + assertThat(parsed.getConfig(), anEmptyMap()); } public void testGetVersion() { diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java new file mode 100644 index 0000000000000..059cb15551acb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class PluginsLoaderTests extends ESTestCase { + + public void testToModuleName() { + assertThat(PluginsLoader.toModuleName("module.name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name1"), equalTo("module.name1")); + assertThat(PluginsLoader.toModuleName("1module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module!@#name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("!module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module_name"), equalTo("module_name")); + assertThat(PluginsLoader.toModuleName("-module-name-"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("_module_name"), equalTo("_module_name")); + assertThat(PluginsLoader.toModuleName("_"), equalTo("_")); + } +} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index f927a12b50da3..015bc72747bf2 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -66,12 +66,12 @@ public class PluginsServiceTests extends ESTestCase { public static class FilterablePlugin extends Plugin implements ScriptPlugin {} static PluginsService newPluginsService(Settings settings) { - return new PluginsService(settings, null, null, TestEnvironment.newEnvironment(settings).pluginsFile()) { + return new PluginsService(settings, null, new PluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile()) { @Override protected void addServerExportsService(Map> qualifiedExports) { // tests don't run modular } - }; + }); } static PluginsService newMockPluginsService(List> classpathPlugins) { @@ -466,7 +466,8 @@ public void testExtensiblePlugin() { List.of( new PluginsService.LoadedPlugin( new PluginDescriptor("extensible", null, null, null, null, classname, null, List.of(), false, false, false, false), - extensiblePlugin + extensiblePlugin, + null ) ) ); @@ -480,7 +481,8 @@ public void testExtensiblePlugin() { List.of( new PluginsService.LoadedPlugin( new PluginDescriptor("extensible", null, null, null, null, classname, null, List.of(), false, false, false, false), - extensiblePlugin + extensiblePlugin, + null ), new PluginsService.LoadedPlugin( new PluginDescriptor( @@ -497,7 +499,8 @@ public void testExtensiblePlugin() { false, false ), - testPlugin + testPlugin, + null ) ) ); @@ -875,20 +878,6 @@ public void testCanCreateAClassLoader() { assertEquals(this.getClass().getClassLoader(), loader.getParent()); } - public void testToModuleName() { - assertThat(PluginsService.toModuleName("module.name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name1"), 
equalTo("module.name1")); - assertThat(PluginsService.toModuleName("1module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module!@#name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("!module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module_name"), equalTo("module_name")); - assertThat(PluginsService.toModuleName("-module-name-"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("_module_name"), equalTo("_module_name")); - assertThat(PluginsService.toModuleName("_"), equalTo("_")); - } - static final class Loader extends ClassLoader { Loader(ClassLoader parent) { super(parent); @@ -896,22 +885,25 @@ static final class Loader extends ClassLoader { } // Closes the URLClassLoaders and UberModuleClassloaders of plugins loaded by the given plugin service. + // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. static void closePluginLoaders(PluginsService pluginService) { for (var lp : pluginService.plugins()) { - if (lp.loader() instanceof URLClassLoader urlClassLoader) { + if (lp.classLoader() instanceof URLClassLoader urlClassLoader) { try { PrivilegedOperations.closeURLClassLoader(urlClassLoader); } catch (IOException unexpected) { throw new UncheckedIOException(unexpected); } - } - if (lp.loader() instanceof UberModuleClassLoader loader) { + } else if (lp.classLoader() instanceof UberModuleClassLoader loader) { try { PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); } catch (Exception e) { throw new RuntimeException(e); } + } else { + throw new AssertionError("Cannot close unexpected classloader " + lp.classLoader()); } + } } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..03d1adff42c4e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.STALE_SETTINGS_IMPACT; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.SUCCESS_SYMPTOM; + +/** + * Here, we test {@link FileSettingsHealthIndicatorService} in isolation; + * we do not test that {@link FileSettingsService} uses it correctly. + */ +public class FileSettingsHealthIndicatorServiceTests extends ESTestCase { + + FileSettingsHealthIndicatorService healthIndicatorService; + + @Before + public void initialize() { + healthIndicatorService = new FileSettingsHealthIndicatorService(); + } + + public void testInitiallyGreen() { + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } + + public void testGreenYellowYellowGreen() { + healthIndicatorService.changeOccurred(); + // This is a strange case: a change occurred, but neither success nor failure have been reported yet. + // While the change is still in progress, we don't change the status. 
+ assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie 1"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", "whoopsie 1")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie #2"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 2L, "most_recent_failure", "whoopsie #2")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.successOccurred(); + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 0db29588c4298..ae60a21b6fc22 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,6 +34,7 @@ import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -78,6 +80,8 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; public class FileSettingsServiceTests extends ESTestCase { private static final Logger logger = LogManager.getLogger(FileSettingsServiceTests.class); @@ -86,6 +90,7 @@ public class FileSettingsServiceTests extends ESTestCase { private ReservedClusterStateService controller; private ThreadPool threadpool; private FileSettingsService fileSettingsService; + private FileSettingsHealthIndicatorService healthIndicatorService; @Before public void setUp() throws Exception { @@ -131,7 +136,8 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - fileSettingsService = spy(new FileSettingsService(clusterService, controller, env)); + healthIndicatorService = mock(FileSettingsHealthIndicatorService.class); + fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } @After @@ 
-162,6 +168,7 @@ public void testStartStop() { assertTrue(fileSettingsService.watching()); fileSettingsService.stop(); assertFalse(fileSettingsService.watching()); + verifyNoInteractions(healthIndicatorService); } public void testOperatorDirName() { @@ -208,6 +215,10 @@ public void testInitialFileError() throws Exception { verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalStateException.class.getName()))); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -232,6 +243,10 @@ public void testInitialFileWorks() throws Exception { verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -267,6 +282,10 @@ public void testProcessFileChanges() throws Exception { verify(fileSettingsService, times(1)).processFileChanges(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); + + verify(healthIndicatorService, times(2)).changeOccurred(); + verify(healthIndicatorService, times(2)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -321,6 +340,11 @@ public void testInvalidJSON() throws Exception { // Note: the name "processFileOnServiceStart" is a bit misleading because it is not // referring to fileSettingsService.start(). Rather, it is referring to the initialization // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
+
+        verify(healthIndicatorService, times(2)).changeOccurred();
+        verify(healthIndicatorService, times(1)).successOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalArgumentException.class.getName())));
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     private static void awaitOrBust(CyclicBarrier barrier) {
@@ -373,6 +397,12 @@ public void testStopWorksInMiddleOfProcessing() throws Exception {
         fileSettingsService.close();
         // let the deadlocked thread end, so we can cleanly exit the test
         deadThreadLatch.countDown();
+
+        verify(healthIndicatorService, times(1)).changeOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(
+            argThat(s -> s.startsWith(FailedToCommitClusterStateException.class.getName()))
+        );
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     public void testHandleSnapshotRestoreClearsMetadata() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
index 8a8bed9ca73db..9f82911ed121f 100644
--- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
@@ -73,7 +73,7 @@ public List<Route> routes() {
         params.put("consumed", randomAlphaOfLength(8));
         params.put("unconsumed", randomAlphaOfLength(8));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         final IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
             () -> handler.handleRequest(request, channel, mockClient)
@@ -108,7 +108,7 @@ public List<Route> routes() {
         params.put("unconsumed-first", randomAlphaOfLength(8));
         params.put("unconsumed-second", randomAlphaOfLength(8));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         final IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
             () -> handler.handleRequest(request, channel, mockClient)
@@ -155,7 +155,7 @@ public List<Route> routes() {
         params.put("very_close_to_parametre", randomAlphaOfLength(8));
         params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         final IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
             () -> handler.handleRequest(request, channel, mockClient)
@@ -206,7 +206,7 @@ public List<Route> routes() {
         params.put("consumed", randomAlphaOfLength(8));
         params.put("response_param", randomAlphaOfLength(8));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         handler.handleRequest(request, channel, mockClient);
         assertTrue(restChannelConsumer.executed);
         assertTrue(restChannelConsumer.closed);
@@ -238,7 +238,7 @@ public List<Route> routes() {
         params.put("human", null);
         params.put("error_trace", randomFrom("true", "false", null));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         handler.handleRequest(request, channel, mockClient);
         assertTrue(restChannelConsumer.executed);
         assertTrue(restChannelConsumer.closed);
@@ -283,7 +283,7 @@ public List<Route> routes() {
         params.put("size", randomAlphaOfLength(8));
         params.put("time", randomAlphaOfLength(8));
         RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
-        RestChannel channel = new FakeRestChannel(request, true, 1);
+        RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         handler.handleRequest(request, channel, mockClient);
         assertTrue(restChannelConsumer.executed);
         assertTrue(restChannelConsumer.closed);
@@ -314,7 +314,7 @@ public List<Route> routes() {
             new BytesArray(builder.toString()),
             XContentType.JSON
         ).build();
-        final RestChannel channel = new FakeRestChannel(request, true, 1);
+        final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         handler.handleRequest(request, channel, mockClient);
         assertTrue(restChannelConsumer.executed);
         assertTrue(restChannelConsumer.closed);
@@ -341,7 +341,7 @@ public List<Route> routes() {
         };
 
         final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).build();
-        final RestChannel channel = new FakeRestChannel(request, true, 1);
+        final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         handler.handleRequest(request, channel, mockClient);
         assertTrue(restChannelConsumer.executed);
         assertTrue(restChannelConsumer.closed);
@@ -371,7 +371,7 @@ public List<Route> routes() {
             new BytesArray(builder.toString()),
             XContentType.JSON
         ).build();
-        final RestChannel channel = new FakeRestChannel(request, true, 1);
+        final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
         final IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
             () -> handler.handleRequest(request, channel, mockClient)
diff --git a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java
index 907c16aad5fdc..eece90ed94cf9 100644
--- a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java
@@ -56,7 +56,7 @@ public void testEncodesChunkedXContentCorrectly() throws IOException {
             ToXContent.EMPTY_PARAMS,
             new FakeRestChannel(
                 new FakeRestRequest.Builder(xContentRegistry()).withContent(BytesArray.EMPTY, randomXContent.type()).build(),
-                true,
+                randomBoolean(),
                 1
             )
         );
diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index afdad1045b4de..2fdb3daa26da4 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -161,7 +161,7 @@ public void testApplyProductSpecificResponseHeaders() {
         final ThreadContext threadContext = client.threadPool().getThreadContext();
         final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider);
         RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build();
-        AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST);
+        AssertingChannel channel =
new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the // context in this test @@ -180,7 +180,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "bar")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); assertTrue(channel.getSendResponseCalled()); } @@ -211,7 +211,7 @@ public String getName() { }); } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy( eq(1L), @@ -235,7 +235,7 @@ public MethodHandlers next() { return null; } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -257,7 +257,7 @@ public MethodHandlers next() { } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -280,7 +280,7 @@ public String getName() { })); when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenAnswer(x -> handlers.iterator()); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.METHOD_NOT_ALLOWED); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 405))); } @@ -290,7 +290,7 @@ public void testDispatchBadRequestEmitsMetric() { final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, threadContext, new Exception()); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -314,7 +314,7 @@ public MethodHandlers next() { return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> {}); } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new 
AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); verify(tracer).startTrace( eq(threadContext), @@ -340,7 +340,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { new RestResponse(RestStatus.OK, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY) ) ); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRequest, channel, threadContext); assertTrue(channel.getSendResponseCalled()); } @@ -466,7 +466,7 @@ public void testRestInterceptor() throws Exception { ); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); httpServerTransport.start(); assertThat(wrapperCalled.get(), is(true)); @@ -477,7 +477,7 @@ public void testDispatchRequestAddsAndFreesBytesOnSuccess() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -489,7 +489,7 @@ public void testDispatchRequestAddsAndFreesBytesOnError() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/error", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -502,7 +502,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnError() { String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/error", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true); + ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, randomBoolean()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -521,7 +521,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnErrorDuringSend() { ); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/foo", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true) { + ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, randomBoolean()) { @Override protected BytesStream newBytesOutput() { return new 
RecyclerBytesStreamOutput(recycler); @@ -538,7 +538,7 @@ public void testDispatchRequestLimitsBytes() { int contentLength = BREAKER_LIMIT.bytesAsInt() + 1; String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.TOO_MANY_REQUESTS); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.TOO_MANY_REQUESTS); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -549,7 +549,7 @@ public void testDispatchRequestLimitsBytes() { public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); restController.registerHandler( new Route(GET, "/"), @@ -566,7 +566,7 @@ public void testDispatchDoesNotRequireContentTypeForRequestsWithoutContent() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -582,7 +582,7 @@ public void testDispatchFailsWithPlainText() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler( new Route(GET, "/foo"), (request, channel1, client) -> channel1.sendResponse( @@ -603,7 +603,7 @@ public void testDispatchUnsupportedContentType() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -620,7 +620,7 @@ public void testDispatchWorksWithNewlineDelimitedJson() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -659,7 +659,7 @@ public void testDispatchWithContentStream() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + 
AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -683,7 +683,7 @@ public void testDispatchWithContentStreamNoContentType() { RestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(new BytesArray("{}"), null) .withPath("/foo") .build(); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } @@ -712,7 +712,7 @@ public void testNonStreamingXContentCausesErrorResponse() throws IOException { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -737,7 +737,7 @@ public void testUnknownContentWithContentStream() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -756,7 +756,7 @@ public boolean supportsBulkContent() { public void testDispatchBadRequest() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest( channel, client.threadPool().getThreadContext(), @@ -789,7 +789,7 @@ public boolean canTripCircuitBreaker() { .withContent(BytesReference.bytes(content), content.contentType()) .build(); - final AssertingChannel channel = new AssertingChannel(restRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(restRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); assertFalse(restRequest.isContentConsumed()); @@ -801,7 +801,7 @@ public boolean canTripCircuitBreaker() { public void testDispatchBadRequestUnknownCause() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), null); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("unknown cause")); @@ 
-813,14 +813,14 @@ public void testDispatchBadRequestWithValidationException() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); // it's always a 400 bad request when dispatching "regular" {@code ElasticsearchException} - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), exception); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("bad bad exception")); // but {@code HttpHeadersValidationException} do carry over the rest response code - channel = new AssertingChannel(fakeRestRequest, true, status); + channel = new AssertingChannel(fakeRestRequest, randomBoolean(), status); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), new HttpHeadersValidationException(exception)); assertTrue(channel.getSendResponseCalled()); @@ -831,7 +831,7 @@ public void testFavicon() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(GET) .withPath("/favicon.ico") .build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().contentType(), containsString("image/x-icon")); @@ -841,7 +841,7 @@ public void testFaviconWithWrongHttpMethod() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod( randomValueOtherThanMany(m -> m == GET || m == OPTIONS, () -> randomFrom(RestRequest.Method.values())) ).withPath("/favicon.ico").build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().getHeaders().containsKey("Allow"), equalTo(true)); @@ -906,18 +906,13 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; } }, null); - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); @@ -937,7 +932,7 @@ public Method method() { } }; - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), 
RestStatus.METHOD_NOT_ALLOWED);
         restController.dispatchRequest(request, channel, client.threadPool().getThreadContext());
         verify(tracer).startTrace(any(), any(RestRequest.class), anyString(), anyMap());
         verify(tracer).addError(any(RestRequest.class), any(IllegalArgumentException.class));
@@ -951,7 +946,7 @@ public void testDispatchCompatibleHandler() {
         final String mediaType = randomCompatibleMediaType(version);
         FakeRestRequest fakeRestRequest = requestWithContent(mediaType);
-        AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK);
+        AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK);
 
         // dispatch to a compatible handler
         restController.registerHandler(GET, "/foo", RestApiVersion.minimumSupported(), (request, channel1, client) -> {
@@ -975,7 +970,7 @@ public void testDispatchCompatibleRequestToNewlyAddedHandler() {
         final String mediaType = randomCompatibleMediaType(version);
         FakeRestRequest fakeRestRequest = requestWithContent(mediaType);
-        AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK);
+        AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK);
 
         // dispatch to a CURRENT newly added handler
         restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> {
@@ -1018,7 +1013,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() {
         final String mediaType = randomCompatibleMediaType(version);
         FakeRestRequest fakeRestRequest = requestWithContent(mediaType);
-        AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK);
+        AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK);
 
         // dispatch to a CURRENT newly added handler
         restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> {
@@ -1041,7 +1036,7 @@ public void testCustomMediaTypeValidation() {
         final String mediaType = "application/x-protobuf";
 
         FakeRestRequest fakeRestRequest = requestWithContent(mediaType);
-        AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK);
+        AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK);
 
         // register handler that handles custom media type validation
         restController.registerHandler(new Route(GET, "/foo"), new RestHandler() {
             @Override
@@ -1068,7 +1063,7 @@ public void testBrowserSafelistedContentTypesAreRejected() {
         final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES);
 
         FakeRestRequest fakeRestRequest = requestWithContent(mediaType);
-        final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE);
+        final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE);
 
         restController.registerHandler(new Route(GET, "/foo"), new RestHandler() {
             @Override
@@ -1115,7 +1110,7 @@ public void testApiProtectionWithServerlessDisabled() {
         List<String> accessiblePaths = List.of("/public", "/internal", "/hidden");
         accessiblePaths.forEach(path -> {
             RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build();
-            AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK);
+            AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK);
             restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY));
         });
     }
@@ -1137,12 +1132,12 @@ public void testApiProtectionWithServerlessEnabledAsEndUser() {
         final Consumer<List<String>> checkUnprotected = paths -> paths.forEach(path -> {
             RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build();
-            AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK);
+            AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK);
             restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY));
         });
         final Consumer<List<String>> checkProtected = paths -> paths.forEach(path -> {
             RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build();
-            AssertingChannel channel = new AssertingChannel(request, true, RestStatus.GONE);
+            AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.GONE);
             restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY));
 
             RestResponse restResponse = channel.getRestResponse();
diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
index 4345f3c5e3fb4..7fe2388ec5113 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
@@ -97,7 +97,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception {
         RestRequest restRequest = fakeRestRequestBuilder.build();
 
         // Send the request and verify the response status code
-        FakeRestChannel restChannel = new FakeRestChannel(restRequest, true, 1);
+        FakeRestChannel restChannel = new FakeRestChannel(restRequest, randomBoolean(), 1);
         restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY));
         assertThat(restChannel.capturedResponse().status().getStatus(), is(405));
diff --git a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
index cfed83f352951..b85ad31288c8c 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java
@@ -93,7 +93,6 @@ public void testWithHeaders() throws Exception {
         assertThat(response.getHeaders().get("n1"), contains("v11", "v12"));
         assertThat(response.getHeaders().get("n2"), notNullValue());
         assertThat(response.getHeaders().get("n2"), contains("v21", "v22"));
-        assertChannelWarnings(channel);
     }
 
     public void testEmptyChunkedBody() {
@@ -114,11 +113,11 @@ public void testSimpleExceptionMessage() throws Exception {
         Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar"));
         RestResponse response = new RestResponse(channel, t);
         String text = response.content().utf8ToString();
-        assertThat(text, containsString("ElasticsearchException[an error occurred reading data]"));
-        assertThat(text, not(containsString("FileNotFoundException")));
+        assertThat(text, containsString("""
+            {"type":"exception","reason":"an error occurred reading data"}"""));
+        assertThat(text, not(containsString("file_not_found_exception")));
         assertThat(text, not(containsString("/foo/bar")));
         assertThat(text, not(containsString("error_trace")));
-        assertChannelWarnings(channel);
     }
 
     public void testDetailedExceptionMessage() throws Exception {
@@ -134,20 +133,6 @@ public void testDetailedExceptionMessage() throws Exception {
             {"type":"file_not_found_exception","reason":"/foo/bar"}"""));
     }
 
-    public void
testNonElasticsearchExceptionIsNotShownAsSimpleMessage() throws Exception { - RestRequest request = new FakeRestRequest(); - RestChannel channel = new SimpleExceptionRestChannel(request); - - Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); - RestResponse response = new RestResponse(channel, t); - String text = response.content().utf8ToString(); - assertThat(text, not(containsString("UnknownException[an error occurred reading data]"))); - assertThat(text, not(containsString("FileNotFoundException[/foo/bar]"))); - assertThat(text, not(containsString("error_trace"))); - assertThat(text, containsString("\"error\":\"No ElasticsearchException found\"")); - assertChannelWarnings(channel); - } - public void testErrorTrace() throws Exception { RestRequest request = new FakeRestRequest(); request.params().put("error_trace", "true"); @@ -177,7 +162,6 @@ public void testAuthenticationFailedNoStackTrace() throws IOException { RestResponse response = new RestResponse(channel, authnException); assertThat(response.status(), is(RestStatus.UNAUTHORIZED)); assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); - assertChannelWarnings(channel); } } } @@ -202,7 +186,6 @@ public void testStackTrace() throws IOException { } else { assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); } - assertChannelWarnings(channel); } } } @@ -232,9 +215,9 @@ public void testNullThrowable() throws Exception { RestResponse response = new RestResponse(channel, null); String text = response.content().utf8ToString(); - assertThat(text, containsString("\"error\":\"unknown\"")); + assertThat(text, containsString("\"type\":\"unknown\"")); + assertThat(text, containsString("\"reason\":\"unknown\"")); assertThat(text, not(containsString("error_trace"))); - assertChannelWarnings(channel); } public void testConvert() throws IOException { @@ -324,32 +307,26 @@ public void testErrorToAndFromXContent() throws IOException { original = new ElasticsearchException("ElasticsearchException without cause"); if (detailed) { addHeadersOrMetadata = randomBoolean(); - reason = "ElasticsearchException without cause"; - } else { - reason = "ElasticsearchException[ElasticsearchException without cause]"; } + reason = "ElasticsearchException without cause"; } case 1 -> { original = new ElasticsearchException("ElasticsearchException with a cause", new FileNotFoundException("missing")); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "exception"; - reason = "ElasticsearchException with a cause"; cause = new ElasticsearchException("Elasticsearch exception [type=file_not_found_exception, reason=missing]"); - } else { - reason = "ElasticsearchException[ElasticsearchException with a cause]"; } + type = "exception"; + reason = "ElasticsearchException with a cause"; } case 2 -> { original = new ResourceNotFoundException("ElasticsearchException with custom status"); status = RestStatus.NOT_FOUND; if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "resource_not_found_exception"; - reason = "ElasticsearchException with custom status"; - } else { - reason = "ResourceNotFoundException[ElasticsearchException with custom status]"; } + type = "resource_not_found_exception"; + reason = "ElasticsearchException with custom status"; } case 3 -> { TransportAddress address = buildNewFakeTransportAddress(); @@ -360,12 +337,8 @@ public void testErrorToAndFromXContent() throws IOException { 
new ResourceAlreadyExistsException("ElasticsearchWrapperException with a cause that has a custom status") ); status = RestStatus.BAD_REQUEST; - if (detailed) { - type = "resource_already_exists_exception"; - reason = "ElasticsearchWrapperException with a cause that has a custom status"; - } else { - reason = "RemoteTransportException[[remote][" + address.toString() + "][action]]"; - } + type = "resource_already_exists_exception"; + reason = "ElasticsearchWrapperException with a cause that has a custom status"; } case 4 -> { original = new RemoteTransportException( @@ -373,23 +346,17 @@ public void testErrorToAndFromXContent() throws IOException { new IllegalArgumentException("wrong") ); status = RestStatus.BAD_REQUEST; - if (detailed) { - type = "illegal_argument_exception"; - reason = "wrong"; - } else { - reason = "RemoteTransportException[[ElasticsearchWrapperException with a cause that has a special treatment]]"; - } + type = "illegal_argument_exception"; + reason = "wrong"; } case 5 -> { status = randomFrom(RestStatus.values()); original = new ElasticsearchStatusException("ElasticsearchStatusException with random status", status); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "status_exception"; - reason = "ElasticsearchStatusException with random status"; - } else { - reason = "ElasticsearchStatusException[ElasticsearchStatusException with random status]"; } + type = "status_exception"; + reason = "ElasticsearchStatusException with random status"; } default -> throw new UnsupportedOperationException("Failed to generate random exception"); } @@ -435,7 +402,6 @@ public void testErrorToAndFromXContent() throws IOException { assertEquals(expected.status(), parsedError.status()); assertDeepEquals(expected, parsedError); - assertChannelWarnings(channel); } public void testNoErrorFromXContent() throws IOException { @@ -502,7 +468,9 @@ public void testResponseContentTypeUponException() throws Exception { Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); RestResponse response = new RestResponse(channel, t); assertThat(response.contentType(), equalTo(mediaType)); - assertChannelWarnings(channel); + assertWarnings( + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure used for detailed errors." + ); } public void testSupressedLogging() throws IOException { @@ -534,7 +502,6 @@ public void testSupressedLogging() throws IOException { "401", "unauthorized" ); - assertChannelWarnings(channel); } private void assertLogging( @@ -560,15 +527,6 @@ private void assertLogging( } } - private void assertChannelWarnings(RestChannel channel) { - if (channel.detailedErrorsEnabled() == false) { - assertWarnings( - "The JSON format of non-detailed errors will change in Elasticsearch 9.0" - + " to match the JSON structure used for detailed errors. To keep using the existing format, use the V8 REST API." 
-            );
-        }
-    }
-
     public static class WithHeadersException extends ElasticsearchException {
 
         WithHeadersException() {
diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java
index 03ae366050646..827a07b89b2b8 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java
@@ -26,7 +26,7 @@ public class RestBuilderListenerTests extends ESTestCase {
 
     public void testXContentBuilderClosedInBuildResponse() throws Exception {
         AtomicReference<XContentBuilder> builderAtomicReference = new AtomicReference<>();
         RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>(
-            new FakeRestChannel(new FakeRestRequest(), true, 1)
+            new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1)
         ) {
             @Override
             public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception {
@@ -44,7 +44,7 @@ public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws E
     public void testXContentBuilderNotClosedInBuildResponseAssertionsDisabled() throws Exception {
         AtomicReference<XContentBuilder> builderAtomicReference = new AtomicReference<>();
         RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>(
-            new FakeRestChannel(new FakeRestRequest(), true, 1)
+            new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1)
         ) {
             @Override
             public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception {
@@ -68,7 +68,7 @@ public void testXContentBuilderNotClosedInBuildResponseAssertionsEnabled() throw
         assumeTrue("tests are not being run with assertions", RestBuilderListener.class.desiredAssertionStatus());
 
         RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>(
-            new FakeRestChannel(new FakeRestRequest(), true, 1)
+            new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1)
         ) {
             @Override
             public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java
index 8104ecfc31c3d..dad6885a08fa8 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java
@@ -34,7 +34,7 @@ public void testConsumesParameters() throws Exception {
         FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(
             Map.of("parent_task_id", "the node:3", "nodes", "node1,node2", "actions", "*")
         ).build();
-        FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, 1);
+        FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), 1);
         try (var threadPool = createThreadPool()) {
             final var nodeClient = buildNodeClient(threadPool);
             action.handleRequest(fakeRestRequest, fakeRestChannel, nodeClient);
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
index 0d35e4311032d..f83ba1704f954 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
@@ -222,7 +222,7 @@ public void next() {
             })
             .withHeaders(Map.of("Content-Type",
Collections.singletonList("application/json"))) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 1); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); RestBulkAction.ChunkHandler chunkHandler = new RestBulkAction.ChunkHandler( true, diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 4822b1c64cf41..d6953e79a0c3f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -51,7 +51,7 @@ public void testEnableFieldsEmulationNoErrors() throws Exception { .withParams(params) .build(); - action.handleRequest(request, new FakeRestChannel(request, true, 1), verifyingClient); + action.handleRequest(request, new FakeRestChannel(request, randomBoolean(), 1), verifyingClient); } public void testValidateSearchRequest() { diff --git a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java new file mode 100644 index 0000000000000..c4a1699181efc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java @@ -0,0 +1,342 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.script;
+
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType;
+import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper;
+import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests;
+import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimDotProduct;
+import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimInvHamming;
+import org.elasticsearch.script.field.vectors.BitMultiDenseVectorDocValuesField;
+import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField;
+import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField;
+import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HexFormat;
+import java.util.List;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class MultiVectorScoreScriptUtilsTests extends ESTestCase {
+
+    @BeforeClass
+    public static void setup() {
+        assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled());
+    }
+
+    public void testFloatMultiVectorClassBindings() throws IOException {
+        String fieldName = "vector";
+        int dims = 5;
+        float[][][] docVectors = new float[][][] {
+            { { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }, { 100.0f, 200.0f, -50.0f, 10.0f, -150.0f } } };
+        float[][] docMagnitudes = new float[][] { { 0.0f, 0.0f } };
+        for (int i = 0; i < docVectors.length; i++) {
+            for (int j = 0; j < docVectors[i].length; j++) {
+                docMagnitudes[i][j] = (float) Math.sqrt(VectorUtil.dotProduct(docVectors[i][j], docVectors[i][j]));
+            }
+        }
+
+        List<List<Number>> queryVector = List.of(Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f));
+        List<List<Number>> invalidQueryVector = List.of(Arrays.asList(0.5, 111.3));
+
+        List<MultiDenseVectorDocValuesField> fields = List.of(
+            new FloatMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT),
+                MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes),
+                "test",
+                ElementType.FLOAT,
+                dims
+            ),
+            new FloatMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT),
+                MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes),
+                "test",
+                ElementType.FLOAT,
+                dims
+            )
+        );
+        for (MultiDenseVectorDocValuesField field : fields) {
+            field.setNextDocId(0);
+
+            ScoreScript scoreScript = mock(ScoreScript.class);
+            when(scoreScript.field("vector")).thenAnswer(mock -> field);
+
+            // Test max similarity dot product
+            MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName);
+            float maxSimDotProductExpected = 65425.625f; // Adjust this value based on expected max similarity
+            assertEquals(
+                "maxSimDotProduct result is not equal to the expected value!",
+                maxSimDotProductExpected,
+                maxSimDotProduct.maxSimDotProduct(),
+                0.001
+            );
+
+            // Check each function rejects query vectors with the wrong dimension
+            IllegalArgumentException e = expectThrows(
+                IllegalArgumentException.class,
+                () -> new MultiVectorScoreScriptUtils.MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName)
+            );
+            assertThat(
+                e.getMessage(),
+                containsString("query vector has a different number of dimensions [2] than the document vectors [5]")
+            );
+            e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName));
+            assertThat(e.getMessage(), containsString("hamming distance is only supported for byte or bit vectors"));
+
+            // Check scripting infrastructure integration
+            assertEquals(65425.6249, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001);
+            when(scoreScript._getDocId()).thenReturn(1);
+            e = expectThrows(
+                IllegalArgumentException.class,
+                () -> new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct()
+            );
+            assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage());
+        }
+    }
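For readers checking the 65425.625f expectation above: it is consistent with max-sim scoring computed as "for each query vector, take the best dot product against any of the document's vectors, then sum over query vectors" (the first doc vector scores about 65425.6, the second about 46508, and there is a single query vector). A standalone sketch under that assumption; the class and method names here are illustrative, not the production implementation:

    // Sketch, not the production code: reproduces the 65425.625 expectation, assuming
    // max-sim sums, per query vector, the best dot product over the document's vectors.
    public final class MaxSimSketch {
        static float maxSimDotProduct(float[][] query, float[][] doc) {
            float sum = 0f;
            for (float[] q : query) {
                float best = Float.NEGATIVE_INFINITY;
                for (float[] d : doc) {
                    float dot = 0f;
                    for (int i = 0; i < q.length; i++) {
                        dot += q[i] * d[i];
                    }
                    best = Math.max(best, dot);
                }
                sum += best; // best-matching doc vector per query vector
            }
            return sum;
        }

        public static void main(String[] args) {
            float[][] doc = { { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }, { 100.0f, 200.0f, -50.0f, 10.0f, -150.0f } };
            float[][] query = { { 0.5f, 111.3f, -13.0f, 14.8f, -156.0f } };
            System.out.println(maxSimDotProduct(query, doc)); // ~65425.625: the first doc vector wins
        }
    }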
+
+    public void testByteMultiVectorClassBindings() throws IOException {
+        String fieldName = "vector";
+        int dims = 5;
+        float[][] docVector = new float[][] { { 1, 127, -128, 5, -10 } };
+        float[][] magnitudes = new float[][] { { 0 } };
+        for (int i = 0; i < docVector.length; i++) {
+            magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i]));
+        }
+        List<List<Number>> queryVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4));
+        List<List<Number>> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1));
+        List<String> hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 1, 125, -12, 2, 4 }));
+
+        List<MultiDenseVectorDocValuesField> fields = List.of(
+            new ByteMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE),
+                MultiDenseVectorScriptDocValuesTests.wrap(magnitudes),
+                "test",
+                ElementType.BYTE,
+                dims
+            )
+        );
+        for (MultiDenseVectorDocValuesField field : fields) {
+            field.setNextDocId(0);
+
+            ScoreScript scoreScript = mock(ScoreScript.class);
+            when(scoreScript.field(fieldName)).thenAnswer(mock -> field);
+
+            // Check each function rejects query vectors with the wrong dimension
+            IllegalArgumentException e = expectThrows(
+                IllegalArgumentException.class,
+                () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName)
+            );
+            assertThat(
+                e.getMessage(),
+                containsString("query vector has a different number of dimensions [2] than the document vectors [5]")
+            );
+            e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName));
+            assertThat(
+                e.getMessage(),
+                containsString("query vector has a different number of dimensions [2] than the document vectors [5]")
+            );
+
+            // Check scripting infrastructure integration
+            assertEquals(17382.0, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001);
+            assertEquals(17382.0, new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName).maxSimDotProduct(), 0.001);
+            assertEquals(0.675, new MaxSimInvHamming(scoreScript, queryVector, fieldName).maxSimInvHamming(), 0.001);
+            assertEquals(0.675, new MaxSimInvHamming(scoreScript, hexidecimalString, fieldName).maxSimInvHamming(), 0.001);
+            MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName);
+            when(scoreScript._getDocId()).thenReturn(1);
+            e = expectThrows(IllegalArgumentException.class, maxSimDotProduct::maxSimDotProduct);
+            assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage());
+        }
+    }
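The 0.675 expectation matches an inverse Hamming similarity of the form 1 - hammingDistance / totalBits: the doc bytes {1, 127, -128, 5, -10} and the query bytes {1, 125, -12, 2, 4} differ in 13 of 40 bits. A standalone sketch assuming that formula (names illustrative, not the production code):

    // Sketch: checks the 0.675 expectation, assuming
    // maxSimInvHamming(q, d) = 1 - hamming(q, d) / totalBits for byte vectors.
    public final class InvHammingSketch {
        public static void main(String[] args) {
            byte[] doc = { 1, 127, -128, 5, -10 };
            byte[] query = { 1, 125, -12, 2, 4 };
            int distance = 0;
            for (int i = 0; i < doc.length; i++) {
                distance += Integer.bitCount((doc[i] ^ query[i]) & 0xFF); // differing bits per byte
            }
            double invHamming = 1.0 - (double) distance / (doc.length * 8); // 1 - 13/40
            System.out.println(invHamming); // 0.675
        }
    }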
+
+    public void testBitMultiVectorClassBindingsDotProduct() throws IOException {
+        String fieldName = "vector";
+        int dims = 8;
+        float[][] docVector = new float[][] { { 124 } };
+        // 124 in binary is b01111100
+        List<List<Number>> queryVector = List.of(
+            Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4, (byte) 1, (byte) 125, (byte) -12)
+        );
+        List<List<Number>> floatQueryVector = List.of(Arrays.asList(1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f));
+        List<List<Number>> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1));
+        List<String> hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 124 }));
+
+        List<MultiDenseVectorDocValuesField> fields = List.of(
+            new BitMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BIT),
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 5 } }),
+                "test",
+                ElementType.BIT,
+                dims
+            )
+        );
+        for (MultiDenseVectorDocValuesField field : fields) {
+            field.setNextDocId(0);
+
+            ScoreScript scoreScript = mock(ScoreScript.class);
+            when(scoreScript.field(fieldName)).thenAnswer(mock -> field);
+
+            MaxSimDotProduct function = new MaxSimDotProduct(scoreScript, queryVector, fieldName);
+            assertEquals(
+                "maxSimDotProduct result is not equal to the expected value!",
+                -12 + 2 + 4 + 1 + 125,
+                function.maxSimDotProduct(),
+                0.001
+            );
+
+            function = new MaxSimDotProduct(scoreScript, floatQueryVector, fieldName);
+            assertEquals(
+                "maxSimDotProduct result is not equal to the expected value!",
+                0.42f + 0f + 1f - 1f - 0.42f,
+                function.maxSimDotProduct(),
+                0.001
+            );
+
+            function = new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName);
+            assertEquals(
+                "maxSimDotProduct result is not equal to the expected value!",
+                Integer.bitCount(124),
+                function.maxSimDotProduct(),
+                0.0
+            );
+
+            // Check each function rejects query vectors with the wrong dimension
+            IllegalArgumentException e = expectThrows(
+                IllegalArgumentException.class,
+                () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName)
+            );
+            assertThat(
+                e.getMessage(),
+                containsString(
+                    "query vector contains inner vectors which have incorrect number of dimensions. "
+                        + "Must be [1] for bitwise operations, or [8] for byte wise operations: provided [2]."
+                )
+            );
+        }
+    }
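For the bit-vector case, a dot product against a byte or float query amounts to summing the query components selected by the document's set bits, while bit-against-bit (the hexadecimal form above) reduces to a popcount of the AND, hence Integer.bitCount(124). The sketch below reproduces both expectations when bit i of doc byte i >> 3, taken LSB-first, gates q[i]; the authoritative bit order is whatever the server-side implementation defines, so treat this ordering as an assumption:

    // Sketch, not the production code: a bit-vector doc gates the query, one
    // query component per set bit (bit i of docBits[i >> 3], LSB first, gates q[i]).
    public final class BitDotSketch {
        static float dot(float[] query, byte[] docBits) {
            float sum = 0f;
            for (int i = 0; i < query.length; i++) {
                if (((docBits[i >> 3] >> (i & 7)) & 1) == 1) {
                    sum += query[i];
                }
            }
            return sum;
        }

        public static void main(String[] args) {
            byte[] doc = { 124 }; // 0b01111100: bits 2..6 set
            System.out.println(dot(new float[] { 1, 125, -12, 2, 4, 1, 125, -12 }, doc)); // 120.0 = -12 + 2 + 4 + 1 + 125
            System.out.println(dot(new float[] { 1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f }, doc)); // ~0.0
            System.out.println(Integer.bitCount(124)); // 5: bit-vs-bit query is a popcount of the AND
        }
    }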
+
+    public void testByteVsFloatSimilarity() throws IOException {
+        int dims = 5;
+        float[][] docVector = new float[][] { { 1f, 127f, -128f, 5f, -10f } };
+        float[][] magnitudes = new float[][] { { 0 } };
+        for (int i = 0; i < docVector.length; i++) {
+            magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i]));
+        }
+        List<List<Number>> listFloatVector = List.of(Arrays.asList(1f, 125f, -12f, 2f, 4f));
+        List<List<Number>> listByteVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4));
+        float[][] floatVector = new float[][] { { 1f, 125f, -12f, 2f, 4f } };
+        byte[][] byteVector = new byte[][] { { (byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4 } };
+
+        List<MultiDenseVectorDocValuesField> fields = List.of(
+            new FloatMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.FLOAT),
+                MultiDenseVectorScriptDocValuesTests.wrap(magnitudes),
+                "field1",
+                ElementType.FLOAT,
+                dims
+            ),
+            new ByteMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE),
+                MultiDenseVectorScriptDocValuesTests.wrap(magnitudes),
+                "field3",
+                ElementType.BYTE,
+                dims
+            )
+        );
+        for (MultiDenseVectorDocValuesField field : fields) {
+            field.setNextDocId(0);
+
+            ScoreScript scoreScript = mock(ScoreScript.class);
+            when(scoreScript.field("vector")).thenAnswer(mock -> field);
+
+            int dotProductExpected = 17382;
+            MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, listFloatVector, "vector");
+            assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001);
+            maxSimDotProduct = new MaxSimDotProduct(scoreScript, listByteVector, "vector");
+            assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001);
+            switch (field.getElementType()) {
+                case BYTE -> {
+                    assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(byteVector), 0.001);
+                    UnsupportedOperationException e = expectThrows(
+                        UnsupportedOperationException.class,
+                        () -> field.get().maxSimDotProduct(floatVector)
+                    );
+                    assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(byte[][] queryVector)] instead"));
+                }
+                case FLOAT -> {
+                    assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(floatVector), 0.001);
+                    UnsupportedOperationException e = expectThrows(
+                        UnsupportedOperationException.class,
+                        () -> field.get().maxSimDotProduct(byteVector)
+                    );
+                    assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(float[][] queryVector)] instead"));
+                }
+            }
+        }
+    }
+
+    public void testByteBoundaries() throws IOException {
+        String fieldName = "vector";
+        int dims = 1;
+        float[] docVector = new float[] { 0 };
+        List<List<Number>> greaterThanVector = List.of(List.of(128));
+        List<List<Number>> lessThanVector = List.of(List.of(-129));
+        List<List<Number>> decimalVector = List.of(List.of(0.5));
+
+        List<MultiDenseVectorDocValuesField> fields = List.of(
+            new ByteMultiDenseVectorDocValuesField(
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { { docVector } }, ElementType.BYTE),
+                MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 1 } }),
+                "test",
+                ElementType.BYTE,
+                dims
+            )
+        );
+
+        for (MultiDenseVectorDocValuesField field : fields) {
+            field.setNextDocId(0);
+
+            ScoreScript scoreScript = mock(ScoreScript.class);
+            when(scoreScript.field(fieldName)).thenAnswer(mock -> field);
+
+            IllegalArgumentException e;
+
+            e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, greaterThanVector, fieldName));
+            assertEquals(
+                "element_type [byte] vectors only support integers between [-128, 127] but found [128.0] at dim [0]; "
+                    + "Preview of invalid vector: [128.0]",
+                e.getMessage()
+            );
+
+            e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, lessThanVector, fieldName));
+            assertEquals(
+                e.getMessage(),
+                "element_type [byte] vectors only support integers between [-128, 127] but found [-129.0] at dim [0]; "
+                    + "Preview of invalid vector: [-129.0]"
+            );
+            e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, decimalVector, fieldName));
+            assertEquals(
+                e.getMessage(),
+                "element_type [byte] vectors only support non-decimal values but found decimal value [0.5] at dim [0]; "
+                    + "Preview of invalid vector: [0.5]"
+            );
+        }
+    }
+
+    public void testDimMismatch() throws IOException {
+
+    }
+}
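The three error messages asserted in testByteBoundaries boil down to one validation rule: element_type [byte] accepts only whole numbers within [-128, 127]. A minimal sketch of that rule, illustrative only and not the server's actual validator:

    import java.util.List;

    // Sketch: byte query vectors must hold whole numbers in [-128, 127],
    // exactly what testByteBoundaries exercises with 128, -129 and 0.5.
    public final class ByteRangeCheckSketch {
        static void checkByteQuery(List<Number> queryVector) {
            for (int dim = 0; dim < queryVector.size(); dim++) {
                double value = queryVector.get(dim).doubleValue();
                if (value % 1.0d != 0.0d) {
                    throw new IllegalArgumentException("decimal value [" + value + "] at dim [" + dim + "]");
                }
                if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
                    throw new IllegalArgumentException("value out of [-128, 127]: [" + value + "] at dim [" + dim + "]");
                }
            }
        }

        public static void main(String[] args) {
            checkByteQuery(List.of(1, 125, -12)); // passes
            checkByteQuery(List.of(128));         // throws: out of range
        }
    }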
diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java
new file mode 100644
index 0000000000000..12f4b931b4d0a
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.VectorUtil;
+import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.BeforeClass;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.function.IntFunction;
+
+public class MultiDenseVectorTests extends ESTestCase {
+
+    @BeforeClass
+    public static void setup() {
+        assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled());
+    }
+
+    public void testByteUnsupported() {
+        int count = randomIntBetween(1, 16);
+        int dims = randomIntBetween(1, 16);
+        byte[][] docVector = new byte[count][dims];
+        float[][] queryVector = new float[count][dims];
+        for (int i = 0; i < docVector.length; i++) {
+            random().nextBytes(docVector[i]);
+            for (int j = 0; j < dims; j++) {
+                queryVector[i][j] = randomFloat();
+            }
+        }
+
+        MultiDenseVector knn = newByteVector(docVector);
+        UnsupportedOperationException e;
+
+        e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector));
+        assertEquals(e.getMessage(), "use [float maxSimDotProduct(byte[][] queryVector)] instead");
+    }
+
+    public void testFloatUnsupported() {
+        int count = randomIntBetween(1, 16);
+        int dims = randomIntBetween(1, 16);
+        float[][] docVector = new float[count][dims];
+        byte[][] queryVector = new byte[count][dims];
+        for (int i = 0; i < docVector.length; i++) {
+            random().nextBytes(queryVector[i]);
+            for (int j = 0; j < dims; j++) {
+                docVector[i][j] = randomFloat();
+            }
+        }
+
+        MultiDenseVector knn = newFloatVector(docVector);
+
+        UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector));
+        assertEquals(e.getMessage(), "use [float maxSimDotProduct(float[][] queryVector)] instead");
+    }
+
+    static MultiDenseVector newFloatVector(float[][] vector) {
+        BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i])));
+        return new FloatMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length);
+    }
+
+    static MultiDenseVector newByteVector(byte[][] vector) {
+        BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i])));
+        return new ByteMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length);
+    }
+
+    static BytesRef magnitudes(int count, IntFunction<Float> magnitude) {
+        ByteBuffer magnitudeBuffer = ByteBuffer.allocate(count * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN);
+        for (int i = 0; i < count; i++) {
+            magnitudeBuffer.putFloat(magnitude.apply(i));
+        }
+        return new BytesRef(magnitudeBuffer.array());
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java
index 125b2d20cf9f3..6e9bb596e944b 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.search.aggregations;
 
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperServiceTestCase;
@@ -113,7 +114,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt
         }
 
         @Override
-        public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+        public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) {
             return new InternalAggregation[] { null };
         }
 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java
index 8d3fe0f7f6e79..2d0622dbb6322 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java
@@ -15,6 +15,7 @@
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
@@ -47,7 +48,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt
         }
 
         @Override
-        public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+        public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) {
             throw new UnsupportedOperationException();
         }
 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java
index bd423999722f3..c9185fe35e677 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java
+++
b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -137,17 +137,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -157,17 +155,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, @@ -192,17 +188,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -212,17 +206,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 9b6ea7272d0f9..e796cee92c0dc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.BucketCollector; @@ -77,7 +79,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + 
collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -91,7 +93,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -141,7 +143,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); equalTo(Map.of(0L, List.of(0, 1, 2, 3, 4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9))); }); @@ -158,7 +160,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9)))); }); @@ -176,12 +178,20 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(0, 1, 2, 3), 1L, List.of(8), 2L, List.of(9)))); }); } + private LongArray toLongArray(long... 
lons) { + LongArray longArray = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(lons.length); + for (int i = 0; i < lons.length; i++) { + longArray.set(i, lons[i]); + } + return longArray; + } + private void testCase( BiFunction leafCollector, CheckedBiConsumer verify diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index 80f27b31ca65b..fb4c62ad66f19 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -72,7 +73,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[0]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index c300bfed5f62a..ad2543548dcae 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -59,10 +59,9 @@ public void setUp() throws Exception { @Override protected InternalFilters createTestInstance(String name, Map metadata, InternalAggregations aggregations) { final List buckets = new ArrayList<>(); - for (int i = 0; i < keys.size(); ++i) { - String key = keys.get(i); + for (String key : keys) { int docCount = randomIntBetween(0, 1000); - buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed, keyedBucket)); + buckets.add(new InternalBucket(key, docCount, aggregations)); } return new InternalFilters(name, buckets, keyed, keyedBucket, metadata); } @@ -94,7 +93,7 @@ protected InternalFilters mutateInstance(InternalFilters instance) { case 0 -> name += randomAlphaOfLength(5); case 1 -> { buckets = new ArrayList<>(buckets); - buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed, keyedBucket)); + buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY)); } default -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index 9e6829139d772..5eb1500e37269 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -106,7 +106,7 @@ private InternalDateHistogram createTestInstance( // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 
if (frequently()) { long key = startingDate + intervalMillis * i; - buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations)); + buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -181,13 +181,7 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalDateHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index db93bc5dfe179..f97a836712e36 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -74,7 +74,7 @@ protected InternalHistogram createTestInstance(String name, Map // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { final int docCount = TestUtil.nextInt(random(), 1, 50); - buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations)); + buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -96,7 +96,7 @@ public void testHandlesNaN() { newBuckets.addAll(buckets.subList(0, buckets.size() - 1)); } InternalHistogram.Bucket b = buckets.get(buckets.size() - 1); - newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); + newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, b.format, b.aggregations)); List reduceMe = List.of(histogram, histogram2); InternalAggregationTestCase.reduce(reduceMe, mockReduceContext(mockBuilder(reduceMe)).forPartialReduction()); @@ -171,13 +171,7 @@ protected InternalHistogram mutateInstance(InternalHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java index 5ca78f322491b..dc5b57619676e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java @@ -75,16 +75,7 @@ private InternalIpPrefix createTestInstance( BytesRef key = itr.next(); boolean v6 = InetAddressPoint.decode(key.bytes) instanceof Inet6Address; buckets.add( - new InternalIpPrefix.Bucket( - DocValueFormat.IP, - key, - keyed, - v6, - 
prefixLength, - appendPrefixLength, - randomLongBetween(0, Long.MAX_VALUE), - aggregations - ) + new InternalIpPrefix.Bucket(key, v6, prefixLength, appendPrefixLength, randomLongBetween(0, Long.MAX_VALUE), aggregations) ); } @@ -126,7 +117,6 @@ protected void assertReduced(InternalIpPrefix reduced, List in Map expectedCounts = new HashMap<>(); for (InternalIpPrefix i : inputs) { for (InternalIpPrefix.Bucket b : i.getBuckets()) { - assertThat(b.getFormat(), equalTo(DocValueFormat.IP)); long acc = expectedCounts.getOrDefault(b.getKey(), 0L); acc += b.getDocCount(); expectedCounts.put(b.getKey(), acc); @@ -146,20 +136,16 @@ protected void assertReduced(InternalIpPrefix reduced, List in public void testPartialReduceNoMinDocCount() { InternalIpPrefix.Bucket b1 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), false, - false, 1, false, 1, InternalAggregations.EMPTY ); InternalIpPrefix.Bucket b2 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("200.0.0.1"))), false, - false, 1, false, 2, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index b888e61e1bbf9..383065193c4d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -72,7 +72,7 @@ protected InternalBinaryRange createTestInstance( for (int i = 0; i < ranges.size(); ++i) { final int docCount = randomIntBetween(1, 100); final String key = (i == nullKey) ? 
null : randomAlphaOfLength(10); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); + buckets.add(new InternalBinaryRange.Bucket(format, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); } @@ -113,7 +113,6 @@ protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) { buckets.add( new InternalBinaryRange.Bucket( format, - keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)), new BytesRef(randomAlphaOfLengthBetween(1, 20)), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index 255ad7c4417b3..fdfffaf8fb8e7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -81,7 +81,7 @@ protected InternalDateRange createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalDateRange(name, buckets, format, keyed, metadata); } @@ -105,9 +105,7 @@ protected InternalDateRange mutateInstance(InternalDateRange instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 49144ec2f40fb..dcb41322a9426 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -63,7 +63,7 @@ protected InternalGeoDistance createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed)); + buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations)); } return new InternalGeoDistance(name, buckets, keyed, metadata); } @@ -86,9 +86,7 @@ protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false) - ); + buckets.add(new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index da0fbd94d6ed6..0d957255b6416 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalRange<>(name, buckets, format, keyed, metadata); } @@ -100,9 +100,7 @@ protected Class interna buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 2df6a0cfb91ca..a0a24e98ae721 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -68,7 +69,7 @@ public void testReplay() throws Exception { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java index f75f9f474c8e8..2f51a5a09a8ac 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java @@ -11,22 +11,29 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import 
org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.aggregations.metrics.TopHits; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.DoubleStream; @@ -37,6 +44,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notANumber; @@ -76,6 +85,35 @@ public void testAggregationSampling() throws IOException { assertThat(avgAvg, closeTo(1.5, 0.5)); } + public void testAggregationSampling_withScores() throws IOException { + long[] counts = new long[5]; + AtomicInteger integer = new AtomicInteger(); + do { + testCase(RandomSamplerAggregatorTests::writeTestDocs, (InternalRandomSampler result) -> { + counts[integer.get()] = result.getDocCount(); + if (result.getDocCount() > 0) { + TopHits agg = result.getAggregations().get("top"); + List hits = Arrays.asList(agg.getHits().getHits()); + assertThat(Strings.toString(result), hits, hasSize(1)); + assertThat(Strings.toString(result), hits.get(0).getScore(), allOf(greaterThan(0.0f), lessThan(1.0f))); + } + }, + new AggTestConfig( + new RandomSamplerAggregationBuilder("my_agg").subAggregation(AggregationBuilders.topHits("top").size(1)) + .setProbability(0.25), + longField(NUMERIC_FIELD_NAME) + ).withQuery( + new BooleanQuery.Builder().add( + new TermQuery(new Term(KEYWORD_FIELD_NAME, KEYWORD_FIELD_VALUE)), + BooleanClause.Occur.SHOULD + ).build() + ) + ); + } while (integer.incrementAndGet() < 5); + long avgCount = LongStream.of(counts).sum() / integer.get(); + assertThat(avgCount, allOf(greaterThanOrEqualTo(20L), lessThanOrEqualTo(70L))); + } + public void testAggregationSamplingNestedAggsScaled() throws IOException { // in case 0 docs get sampled, which can rarely happen // in case the test index has many segments. 
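The [20, 70] band asserted in testAggregationSampling_withScores brackets the mean of what is effectively a binomial draw at probability 0.25. A rough illustration of that reasoning; the corpus size below is hypothetical, since the real document count comes from writeTestDocs, which is not shown in this hunk:

    import java.util.Random;

    // Rough illustration, not the test's code: with sampling probability p = 0.25 the
    // per-run doc count is ~Binomial(n, p), so for a hypothetical n = 200 docs the
    // mean is 50 and the five-run average lands well inside the asserted [20, 70] band.
    public final class SamplerExpectationSketch {
        public static void main(String[] args) {
            int n = 200; // hypothetical; the real value lives in writeTestDocs
            double p = 0.25;
            Random random = new Random(42);
            long total = 0;
            for (int run = 0; run < 5; run++) {
                int sampled = 0;
                for (int doc = 0; doc < n; doc++) {
                    if (random.nextDouble() < p) {
                        sampled++;
                    }
                }
                total += sampled;
            }
            System.out.println(total / 5); // ~50
        }
    }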
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index 9f667b3efcb61..b2f79c02baf8d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -81,11 +81,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return new Object[0]; @@ -161,11 +156,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return mock(InternalTDigestPercentiles.class); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index c699e117ffbf4..d5e930321db95 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -77,7 +77,7 @@ protected SearchHit nextDoc(int doc) { } }; - SearchHit[] hits = it.iterate(null, reader, docs); + SearchHit[] hits = it.iterate(null, reader, docs, randomBoolean()); assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { @@ -125,7 +125,7 @@ protected SearchHit nextDoc(int doc) { } }; - Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs)); + Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs, randomBoolean())); assertThat(e.getMessage(), containsString("Error running fetch phase for doc [" + badDoc + "]")); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 3699cdee3912b..d1bbc1ec5910b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -607,17 +607,6 @@ public void testOrderSerialization() throws Exception { } } - public void testForceSourceDeprecation() throws IOException { - String highlightJson = """ - { "fields" : { }, "force_source" : true } - """; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightJson)) { - HighlightBuilder.fromXContent(parser); - } - - assertWarnings("Deprecated field [force_source] used, this field is unused and will be removed entirely"); - } - protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(contentType); if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java 
index 33978b4cd6b9f..d91b4430e4f94 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java @@ -54,7 +54,7 @@ public void clearScroll(ClearScrollRequest request, ActionListenerpath/blob10000false\ """), handleRequest(handler, "GET", "/bucket/?uploads&prefix=path/blob")); - assertEquals(RestStatus.NOT_FOUND, handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" + final var completeUploadResponse = handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" @@ -272,7 +274,13 @@ public void testListAndAbortMultipartUpload() { %s 2 - """, part1Etag, part2Etag)).status()); + """, part1Etag, part2Etag)); + if (completeUploadResponse.status() == RestStatus.OK) { + // possible, but rare, indicating that S3 started processing the upload before returning an error + assertThat(completeUploadResponse.body().utf8ToString(), allOf(containsString(""), containsString("NoSuchUpload"))); + } else { + assertEquals(RestStatus.NOT_FOUND, completeUploadResponse.status()); + } } private static String getUploadId(BytesReference createUploadResponseBody) { diff --git a/test/framework/build.gradle b/test/framework/build.gradle index c61a3b1851ea9..126b95041da11 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams; apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 5ca52024e82f6..add110de35a0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -157,23 +157,14 @@ public static DataStream newInstance( .build(); } - public static String getLegacyDefaultBackingIndexName( - String dataStreamName, - long generation, - long epochMillis, - boolean isNewIndexNameFormat - ) { - if (isNewIndexNameFormat) { - return String.format( - Locale.ROOT, - BACKING_INDEX_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); - } else { - return getLegacyDefaultBackingIndexName(dataStreamName, generation); - } + public static String getLegacyDefaultBackingIndexName(String dataStreamName, long generation, long epochMillis) { + return String.format( + Locale.ROOT, + BACKING_INDEX_PREFIX + "%s-%s-%06d", + dataStreamName, + DATE_FORMATTER.formatMillis(epochMillis), + generation + ); } public static String getLegacyDefaultBackingIndexName(String dataStreamName, long generation) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index e538a9955d9b6..e86cb8562537f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.compress.CompressedXContent; import 
org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -142,52 +141,4 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { ); assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } - - public void testTypeAndFriendsAreSilentlyIgnoredBefore_8_6_0() throws IOException { - assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); - IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, previousVersion); - assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); - MapperService mapperService = createMapperService(version, mapping(b -> {})); - // these parameters were previously silently ignored, they will still be ignored in existing indices - String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; - for (String param : unsupportedParameters) { - String mappingAsString = "{\n" - + " \"_doc\" : {\n" - + " \"" - + fieldName() - + "\" : {\n" - + " \"" - + param - + "\" : \"any\"\n" - + " }\n" - + " }\n" - + "}"; - assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); - } - } - - public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { - assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_6_0, IndexVersion.current()); - assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); - MapperService mapperService = createMapperService(version, mapping(b -> {})); - // these parameters were previously silently ignored, they are now deprecated in new indices - String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; - for (String param : unsupportedParameters) { - String mappingAsString = "{\n" - + " \"_doc\" : {\n" - + " \"" - + fieldName() - + "\" : {\n" - + " \"" - + param - + "\" : \"any\"\n" - + " }\n" - + " }\n" - + "}"; - assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); - assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); - } - } } diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 40fb4f91c77d0..38c7b1eb04772 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -31,6 +31,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.MockPluginsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; @@ -279,10 +280,11 @@ private MockNode( final 
Collection> classpathPlugins, final boolean forbidPrivateIndexSettings ) { - super(NodeConstruction.prepareConstruction(environment, new MockServiceProvider() { + super(NodeConstruction.prepareConstruction(environment, null, new MockServiceProvider() { + @Override - PluginsService newPluginService(Environment environment, Settings settings) { - return new MockPluginsService(settings, environment, classpathPlugins); + PluginsService newPluginService(Environment environment, PluginsLoader pluginsLoader) { + return new MockPluginsService(environment.settings(), environment, classpathPlugins); } }, forbidPrivateIndexSettings)); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index e4734f9cf021e..9e96396493bdf 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -20,7 +20,6 @@ import org.elasticsearch.plugins.spi.SPIClassIterator; import java.lang.reflect.Constructor; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -44,14 +43,18 @@ public class MockPluginsService extends PluginsService { * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ public MockPluginsService(Settings settings, Environment environment, Collection> classpathPlugins) { - super(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + super(settings, environment.configFile(), new PluginsLoader(environment.modulesFile(), environment.pluginsFile()) { - final Path configPath = environment.configFile(); + @Override + protected void addServerExportsService(Map> qualifiedExports) { + // tests don't run modular + } + }); List pluginsLoaded = new ArrayList<>(); for (Class pluginClass : classpathPlugins) { - Plugin plugin = loadPlugin(pluginClass, settings, configPath); + Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile()); PluginDescriptor pluginInfo = new PluginDescriptor( pluginClass.getName(), "classpath plugin", @@ -69,7 +72,7 @@ public MockPluginsService(Settings settings, Environment environment, Collection if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } - pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin, pluginClass.getClassLoader(), ModuleLayer.boot())); + pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin, MockPluginsService.class.getClassLoader())); } loadExtensions(pluginsLoaded); this.classpathPlugins = List.copyOf(pluginsLoaded); @@ -169,9 +172,4 @@ private static List createExtensions( } return extensions; } - - @Override - protected void addServerExportsService(Map> qualifiedExports) { - // tests don't run modular - } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index b50fd4e96044c..51f66418bb44b 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -53,6 +53,8 @@ import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.OriginalIndices; +import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.TriConsumer; @@ -143,8 +145,10 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; +import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; @@ -466,7 +470,18 @@ private SubSearchContext buildSubSearchContext( .when(subContext) .getNestedDocuments(); when(ctx.getSearchExecutionContext()).thenReturn(subContext); - + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + new SearchRequest().allowPartialSearchResults(randomBoolean()), + new ShardId("index", "indexUUID", 0), + 0, + 1, + AliasFilter.EMPTY, + 1f, + 0L, + null + ); + when(ctx.request()).thenReturn(request); IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0)); when(indexShard.indexSettings()).thenReturn(indexSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java index 7b18cf575f190..ea82c9d21ab89 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.plugins.Plugin; @@ -44,6 +45,7 @@ import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; @@ -149,19 +151,23 @@ public static void stopClusters() throws IOException { } protected void disconnectFromRemoteClusters() throws Exception { - Settings.Builder settings = Settings.builder(); final Set clusterAliases = clusterGroup.clusterAliases(); for (String clusterAlias : clusterAliases) { if (clusterAlias.equals(LOCAL_CLUSTER) == false) { - settings.putNull("cluster.remote." + clusterAlias + ".seeds"); - settings.putNull("cluster.remote." + clusterAlias + ".mode"); - settings.putNull("cluster.remote." + clusterAlias + ".proxy_address"); + removeRemoteCluster(clusterAlias); } } + } + + protected void removeRemoteCluster(String clusterAlias) throws Exception { + Settings.Builder settings = Settings.builder(); + settings.putNull("cluster.remote." + clusterAlias + ".seeds"); + settings.putNull("cluster.remote." + clusterAlias + ".mode"); + settings.putNull("cluster.remote." 
+ clusterAlias + ".proxy_address"); client().admin().cluster().prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).setPersistentSettings(settings).get(); assertBusy(() -> { for (TransportService transportService : cluster(LOCAL_CLUSTER).getInstances(TransportService.class)) { - assertThat(transportService.getRemoteClusterService().getRegisteredRemoteClusterNames(), empty()); + assertThat(transportService.getRemoteClusterService().getRegisteredRemoteClusterNames(), not(contains(clusterAlias))); } }); } @@ -178,12 +184,17 @@ protected void configureAndConnectsToRemoteClusters() throws Exception { } protected void configureRemoteCluster(String clusterAlias, Collection seedNodes) throws Exception { - final String remoteClusterSettingPrefix = "cluster.remote." + clusterAlias + "."; - Settings.Builder settings = Settings.builder(); - final List seedAddresses = seedNodes.stream().map(node -> { + final var seedAddresses = seedNodes.stream().map(node -> { final TransportService transportService = cluster(clusterAlias).getInstance(TransportService.class, node); - return transportService.boundAddress().publishAddress().toString(); + return transportService.boundAddress().publishAddress(); }).toList(); + configureRemoteClusterWithSeedAddresses(clusterAlias, seedAddresses); + } + + protected void configureRemoteClusterWithSeedAddresses(String clusterAlias, Collection seedNodes) throws Exception { + final String remoteClusterSettingPrefix = "cluster.remote." + clusterAlias + "."; + Settings.Builder settings = Settings.builder(); + final List seedAddresses = seedNodes.stream().map(TransportAddress::toString).toList(); boolean skipUnavailable = skipUnavailableForRemoteClusters().containsKey(clusterAlias) ? skipUnavailableForRemoteClusters().get(clusterAlias) : DEFAULT_SKIP_UNAVAILABLE; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 207409dfcf751..5b2beaee00bfe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1704,7 +1704,23 @@ protected final BytesReference toShuffledXContent( boolean humanReadable, String... exceptFieldNames ) throws IOException { - BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, params, humanReadable); + return toShuffledXContent(toXContent, xContentType, RestApiVersion.current(), params, humanReadable, exceptFieldNames); + } + + /** + * Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided + * {@link XContentType}. Wraps the output into a new anonymous object according to the value returned + * by the {@link ToXContent#isFragment()} method returns. Shuffles the keys to make sure that parsing never relies on keys ordering. + */ + protected final BytesReference toShuffledXContent( + ToXContent toXContent, + XContentType xContentType, + RestApiVersion restApiVersion, + ToXContent.Params params, + boolean humanReadable, + String... 
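Note: the refactor above splits disconnectFromRemoteClusters into per-alias helpers, so a test can now drop and re-register a single remote without touching the others. A sketch built from the methods in this hunk, assuming an InternalTestCluster-style getInstance accessor; the alias "cluster_b" is illustrative:

    // Drop one remote and wait until the local cluster no longer lists it.
    removeRemoteCluster("cluster_b");
    // Re-register it from a freshly resolved publish address.
    TransportAddress seed = cluster("cluster_b").getInstance(TransportService.class).boundAddress().publishAddress();
    configureRemoteClusterWithSeedAddresses("cluster_b", List.of(seed));
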
exceptFieldNames + ) throws IOException { + BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, restApiVersion, params, humanReadable); try (XContentParser parser = createParser(xContentType.xContent(), bytes)) { try (XContentBuilder builder = shuffleXContent(parser, rarely(), exceptFieldNames)) { return BytesReference.bytes(builder); @@ -2315,10 +2331,18 @@ public static void safeAwait(CyclicBarrier barrier) { * flag and asserting that the latch is indeed completed before the timeout. */ public static void safeAwait(CountDownLatch countDownLatch) { + safeAwait(countDownLatch, SAFE_AWAIT_TIMEOUT); + } + + /** + * Await on the given {@link CountDownLatch} with a supplied timeout, preserving the thread's interrupt status + * flag and asserting that the latch is indeed completed before the timeout. + */ + public static void safeAwait(CountDownLatch countDownLatch, TimeValue timeout) { try { assertTrue( "safeAwait: CountDownLatch did not reach zero within the timeout", - countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + countDownLatch.await(timeout.millis(), TimeUnit.MILLISECONDS) ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 28c9905386091..8ca9c0709b359 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -63,12 +63,12 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -112,6 +112,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -396,30 +397,11 @@ public void initClient() throws IOException { assert nodesVersions != null; } - /** - * Override to provide additional test-only historical features. - * - * Note: This extension point cannot be used to add cluster features. The provided {@link FeatureSpecification}s - * must contain only historical features, otherwise an assertion error is thrown. - */ - protected List additionalTestOnlyHistoricalFeatures() { - return List.of(); - } - protected final TestFeatureService createTestFeatureService( Map> clusterStateFeatures, Set semanticNodeVersions ) { - // Historical features information is unavailable when using legacy test plugins - if (ESRestTestFeatureService.hasFeatureMetadata() == false) { - logger.warn( - "This test is running on the legacy test framework; historical features from production code will not be available. " - + "You need to port the test to the new test plugins in order to use historical features from production code. 
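Note: the new safeAwait overload keeps the interrupt-preserving assertion but lets a caller pick a bound other than SAFE_AWAIT_TIMEOUT. A usage sketch; the background task and the 30-second bound are illustrative:

    CountDownLatch done = new CountDownLatch(1);
    new Thread(() -> {
        // ... background work under test ...
        done.countDown();
    }).start();
    safeAwait(done, TimeValue.timeValueSeconds(30)); // fails the test if the latch is still up after 30s
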
" - + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.", - RestTestLegacyFeatures.class.getCanonicalName() - ); - } - return new ESRestTestFeatureService(additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, clusterStateFeatures.values()); + return new ESRestTestFeatureService(semanticNodeVersions, clusterStateFeatures.values()); } protected static boolean has(ProductFeature feature) { @@ -719,10 +701,6 @@ protected boolean preserveTemplatesUponCompletion() { * all feature states, deleting system indices, system associated indices, and system data streams. */ protected boolean resetFeatureStates() { - // ML reset fails when ML is disabled in versions before 8.7 - if (isMlEnabled() == false && clusterHasFeature(RestTestLegacyFeatures.ML_STATE_RESET_FALLBACK_ON_DISABLED) == false) { - return false; - } return true; } @@ -917,50 +895,46 @@ private void wipeCluster() throws Exception { * slows down the test because xpack will just recreate * them. */ - // In case of bwc testing, we need to delete component and composable - // index templates only for clusters that support this historical feature - if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) { - try { - Request getTemplatesRequest = new Request("GET", "_index_template"); - Map composableIndexTemplates = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), - false - ); - List names = ((List) composableIndexTemplates.get("index_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toList()); - if (names.isEmpty() == false) { - try { - adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names))); - } catch (ResponseException e) { - logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e); - } + try { + Request getTemplatesRequest = new Request("GET", "_index_template"); + Map composableIndexTemplates = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), + false + ); + List names = ((List) composableIndexTemplates.get("index_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .collect(Collectors.toList()); + if (names.isEmpty() == false) { + try { + adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names))); + } catch (ResponseException e) { + logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e); } - } catch (Exception e) { - logger.debug("ignoring exception removing all composable index templates", e); - // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } - try { - Request compReq = new Request("GET", "_component_template"); - String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); - Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); - List names = ((List) cTemplates.get("component_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toList()); - if (names.isEmpty() == false) { - try { - adminClient().performRequest(new Request("DELETE", 
"_component_template/" + String.join(",", names))); - } catch (ResponseException e) { - logger.warn(() -> format("unable to remove multiple component templates %s", names), e); - } + } catch (Exception e) { + logger.debug("ignoring exception removing all composable index templates", e); + // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore + } + try { + Request compReq = new Request("GET", "_component_template"); + String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); + Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); + List names = ((List) cTemplates.get("component_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .collect(Collectors.toList()); + if (names.isEmpty() == false) { + try { + adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names))); + } catch (ResponseException e) { + logger.warn(() -> format("unable to remove multiple component templates %s", names), e); } - } catch (Exception e) { - logger.debug("ignoring exception removing all component templates", e); - // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } + } catch (Exception e) { + logger.debug("ignoring exception removing all component templates", e); + // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } if (has(ProductFeature.LEGACY_TEMPLATES)) { @@ -1058,29 +1032,25 @@ private Set getAllUnexpectedTemplates() throws IOException { Set unexpectedTemplates = new HashSet<>(); if (preserveDataStreamsUponCompletion() == false && preserveTemplatesUponCompletion() == false) { if (has(ProductFeature.XPACK)) { - // In case of bwc testing, we need to delete component and composable - // index templates only for clusters that support this historical feature - if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) { - Request getTemplatesRequest = new Request("GET", "_index_template"); - Map composableIndexTemplates = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), - false - ); - unexpectedTemplates.addAll( - ((List) composableIndexTemplates.get("index_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toSet()) - ); - Request compReq = new Request("GET", "_component_template"); - String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); - Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); - ((List) cTemplates.get("component_templates")).stream() + Request getTemplatesRequest = new Request("GET", "_index_template"); + Map composableIndexTemplates = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), + false + ); + unexpectedTemplates.addAll( + ((List) composableIndexTemplates.get("index_templates")).stream() .map(ct -> (String) ((Map) ct).get("name")) .filter(name -> isXPackTemplate(name) == false) - .forEach(unexpectedTemplates::add); - } + .collect(Collectors.toSet()) + ); + Request compReq = new Request("GET", "_component_template"); + String componentTemplates = 
EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); + Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); + ((List) cTemplates.get("component_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .forEach(unexpectedTemplates::add); if (has(ProductFeature.LEGACY_TEMPLATES)) { Request getLegacyTemplatesRequest = new Request("GET", "_template"); @@ -1142,6 +1112,7 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE } final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); + deleteRequest.setOptions(deleteRequest.getOptions().toBuilder().setWarningsHandler(ignoreAsyncSearchWarning()).build()); final Response response = adminClient().performRequest(deleteRequest); try (InputStream is = response.getEntity().getContent()) { assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged")); @@ -1154,6 +1125,30 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE } } + // Make warnings handler that ignores the .async-search warning since .async-search may randomly appear when async requests are slow + // See: https://github.com/elastic/elasticsearch/issues/117099 + protected static WarningsHandler ignoreAsyncSearchWarning() { + return new WarningsHandler() { + @Override + public boolean warningsShouldFailRequest(List warnings) { + if (warnings.isEmpty()) { + return false; + } + return warnings.equals( + List.of( + "this request accesses system indices: [.async-search], " + + "but in a future major version, direct access to system indices will be prevented by default" + ) + ) == false; + } + + @Override + public String toString() { + return "ignore .async-search warning"; + } + }; + } + protected static void wipeDataStreams() throws IOException { try { if (hasXPack()) { @@ -1840,8 +1835,9 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); + } else if (isSyntheticSourceConfiguredInMapping(mapping)) { + request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); } - final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1885,6 +1881,27 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } + @SuppressWarnings("unchecked") + protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { + if (mapping == null) { + return false; + } + var mappings = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + mapping.trim().startsWith("{") ? 
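Note: since ignoreAsyncSearchWarning() above returns an ordinary client WarningsHandler, it can be attached to any request that may race with a slow async search, exactly as wipeAllIndices now does. A sketch; the index pattern is illustrative:

    Request delete = new Request("DELETE", "test-*");
    delete.addParameter("expand_wildcards", "open,closed,hidden");
    delete.setOptions(delete.getOptions().toBuilder().setWarningsHandler(ignoreAsyncSearchWarning()).build());
    adminClient().performRequest(delete);
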
mapping : '{' + mapping + '}', + false + ); + if (mappings.containsKey("_doc")) { + mappings = (Map) mappings.get("_doc"); + } + Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); + if (sourceMapper == null) { + return false; + } + Object mode = sourceMapper.get("mode"); + return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); + } + protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); @@ -2282,7 +2299,7 @@ protected static Map> getClusterStateFeatures(RestClient adm */ protected static IndexVersion minimumIndexVersion() throws IOException { final Request request = new Request("GET", "_nodes"); - request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version"); + request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version,nodes.*.index_version"); final Response response = adminClient().performRequest(request); final Map nodes = ObjectPath.createFromResponse(response).evaluate("nodes"); @@ -2290,10 +2307,13 @@ protected static IndexVersion minimumIndexVersion() throws IOException { IndexVersion minVersion = null; for (Map.Entry node : nodes.entrySet()) { Map nodeData = (Map) node.getValue(); - String versionStr = (String) nodeData.get("max_index_version"); + Object versionStr = nodeData.get("index_version"); + if (versionStr == null) { + versionStr = nodeData.get("max_index_version"); + } // fallback on version if index version is not there IndexVersion indexVersion = versionStr != null - ? IndexVersion.fromId(Integer.parseInt(versionStr)) + ? IndexVersion.fromId(Integer.parseInt(versionStr.toString())) : IndexVersion.fromId( parseLegacyVersion((String) nodeData.get("version")).map(Version::id).orElse(IndexVersions.MINIMUM_COMPATIBLE.id()) ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 66c24f157ddfe..9054dc6f94182 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -13,9 +13,6 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.features.FeatureData; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; @@ -25,13 +22,9 @@ import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; -import java.util.List; -import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -48,34 +41,12 @@ class ESRestTestFeatureService implements TestFeatureService { */ private static final Pattern VERSION_FEATURE_PATTERN = Pattern.compile("gte_v(\\d+\\.\\d+\\.\\d+)"); - private final Set knownHistoricalFeatureNames; private final Collection nodeVersions; private final Collection> nodeFeatures; - private final Collection> nodeHistoricalFeatures; - 
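Note: for reference, the inputs isSyntheticSourceConfiguredInMapping accepts: it wraps bare fragments in braces, unwraps an optional "_doc" level, and compares the mode case-insensitively. The mappings below are illustrative:

    assertTrue(isSyntheticSourceConfiguredInMapping("\"_source\": { \"mode\": \"synthetic\" }"));
    assertTrue(isSyntheticSourceConfiguredInMapping("{ \"_doc\": { \"_source\": { \"mode\": \"SYNTHETIC\" } } }"));
    assertFalse(isSyntheticSourceConfiguredInMapping("{ \"_source\": { \"mode\": \"stored\" } }"));
    assertFalse(isSyntheticSourceConfiguredInMapping(null));
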
ESRestTestFeatureService(List featureSpecs, Set nodeVersions, Collection> nodeFeatures) { - List specs = new ArrayList<>(featureSpecs); - specs.add(new RestTestLegacyFeatures()); - if (MetadataHolder.HISTORICAL_FEATURES != null) { - specs.add(MetadataHolder.HISTORICAL_FEATURES); - } - FeatureData featureData = FeatureData.createFromSpecifications(specs); - assert featureData.getNodeFeatures().isEmpty() - : Strings.format( - "Only historical features can be injected via ESRestTestCase#additionalTestOnlyHistoricalFeatures(), rejecting %s", - featureData.getNodeFeatures().keySet() - ); - this.knownHistoricalFeatureNames = featureData.getHistoricalFeatures().lastEntry().getValue(); + ESRestTestFeatureService(Set nodeVersions, Collection> nodeFeatures) { this.nodeVersions = nodeVersions; this.nodeFeatures = nodeFeatures; - this.nodeHistoricalFeatures = nodeVersions.stream() - .map(featureData.getHistoricalFeatures()::floorEntry) - .map(Map.Entry::getValue) - .toList(); - } - - public static boolean hasFeatureMetadata() { - return MetadataHolder.HISTORICAL_FEATURES != null; } private static boolean checkCollection(Collection coll, Predicate pred, boolean any) { @@ -84,11 +55,10 @@ private static boolean checkCollection(Collection coll, Predicate pred @Override public boolean clusterHasFeature(String featureId, boolean any) { - if (checkCollection(nodeFeatures, s -> s.contains(featureId), any) - || checkCollection(nodeHistoricalFeatures, s -> s.contains(featureId), any)) { + if (checkCollection(nodeFeatures, s -> s.contains(featureId), any)) { return true; } - if (MetadataHolder.FEATURE_NAMES.contains(featureId) || knownHistoricalFeatureNames.contains(featureId)) { + if (MetadataHolder.FEATURE_NAMES.contains(featureId)) { return false; // feature known but not present } @@ -132,24 +102,20 @@ public boolean clusterHasFeature(String featureId, boolean any) { return false; } + public static boolean hasFeatureMetadata() { + return MetadataHolder.FEATURE_NAMES.isEmpty() == false; + } + private static class MetadataHolder { - private static final FeatureSpecification HISTORICAL_FEATURES; private static final Set FEATURE_NAMES; static { String metadataPath = System.getProperty("tests.features.metadata.path"); if (metadataPath == null) { FEATURE_NAMES = emptySet(); - HISTORICAL_FEATURES = null; } else { Set featureNames = new HashSet<>(); - Map historicalFeatures = new HashMap<>(); loadFeatureMetadata(metadataPath, (key, value) -> { - if (key.equals("historical_features") && value instanceof Map map) { - for (var entry : map.entrySet()) { - historicalFeatures.put(new NodeFeature((String) entry.getKey()), Version.fromString((String) entry.getValue())); - } - } if (key.equals("feature_names") && value instanceof Collection collection) { for (var entry : collection) { featureNames.add((String) entry); @@ -157,13 +123,6 @@ private static class MetadataHolder { } }); FEATURE_NAMES = Collections.unmodifiableSet(featureNames); - Map unmodifiableHistoricalFeatures = Collections.unmodifiableMap(historicalFeatures); - HISTORICAL_FEATURES = new FeatureSpecification() { - @Override - public Map getHistoricalFeatures() { - return unmodifiableHistoricalFeatures; - } - }; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 9ddcf39d24d98..0c466b9162eb8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ 
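Note: with the historical-feature plumbing removed, version gating in REST tests falls back to the synthetic gte_v features that ESRestTestFeatureService still recognizes (see VERSION_FEATURE_PATTERN above). A minimal sketch, assuming a TestFeatureService in scope named testFeatureService; the version is illustrative:

    // 'any = false' means every node must be at or above 8.12.0.
    if (testFeatureService.clusterHasFeature("gte_v8.12.0", false) == false) {
        return; // skip on mixed or older clusters
    }
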
b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -138,11 +138,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return inboundException; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java deleted file mode 100644 index e43aa940a4881..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.test.rest; - -import org.elasticsearch.Version; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -import static java.util.Map.entry; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; - -/** - * This class groups historical features that have been removed from the production codebase, but are still used by the test - * framework to support BwC tests. Rather than leaving them in the main src we group them here, so it's clear they are not used in - * production code anymore. - */ -public class RestTestLegacyFeatures implements FeatureSpecification { - public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); - public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); - - /** These are "pure test" features: normally we would not need them, and test for TransportVersion/fallback to Version (see for example - * {@code ESRestTestCase#minimumTransportVersion()}. However, some tests explicitly check and validate the content of a response, so - * we need these features to support them. 
- */ - public static final NodeFeature TRANSPORT_VERSION_SUPPORTED = new NodeFeature("transport_version_supported"); - public static final NodeFeature STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION = new NodeFeature( - "state.transport_version_to_nodes_version" - ); - - // Ref: https://github.com/elastic/elasticsearch/pull/86416 - public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); - - // QA - rolling upgrade tests - public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported"); - public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); - public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); - - public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format"); - public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported"); - - public static final NodeFeature TSDB_DOWNSAMPLING_STABLE = new NodeFeature("indices.tsdb_downsampling_stable"); - - /* - * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template. - * Fixed in #98840 - */ - public static final NodeFeature TSDB_EMPTY_TEMPLATE_FIXED = new NodeFeature("indices.tsdb_empty_composable_template_fixed"); - public static final NodeFeature SYNTHETIC_SOURCE_SUPPORTED = new NodeFeature("indices.synthetic_source"); - - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_SUPPORTED = new NodeFeature("allocator.desired_balance"); - - /* - * Cancel shard allocation command is broken for initial desired balance versions - * and might allocate shard on the node where it is not supposed to be. This - * is fixed by https://github.com/elastic/elasticsearch/pull/93635. 
- */ - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_FIXED = new NodeFeature("allocator.desired_balance_fixed"); - public static final NodeFeature INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED = new NodeFeature("settings.indexing_slowlog_level_removed"); - public static final NodeFeature DEPRECATION_WARNINGS_LEAK_FIXED = new NodeFeature("deprecation_warnings_leak_fixed"); - - // QA - Full cluster restart - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature REPLICATION_OF_CLOSED_INDICES = new NodeFeature("indices.closed_replication_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SOFT_DELETES_ENFORCED = new NodeFeature("indices.soft_deletes_enforced"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_TRANSPORT_COMPRESSED_SETTING = new NodeFeature("transport.new_compressed_setting"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SERVICE_ACCOUNTS_SUPPORTED = new NodeFeature("auth.service_accounts_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature TRANSFORM_SUPPORTED = new NodeFeature("transform.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SLM_SUPPORTED = new NodeFeature("slm.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DATA_STREAMS_SUPPORTED = new NodeFeature("data_stream.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_DATA_STREAMS_INDEX_NAME_FORMAT = new NodeFeature("data_stream.new_index_name_format"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DISABLE_FIELD_NAMES_FIELD_REMOVED = new NodeFeature("disable_of_field_names_field_removed"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); - - // YAML - public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), - entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), - entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), - entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), - entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), - entry(TRANSPORT_VERSION_SUPPORTED, VERSION_INTRODUCING_TRANSPORT_VERSIONS), - entry(STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION, Version.V_8_11_0), - entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), - entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), - entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), - entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0), - entry(SYNTHETIC_SOURCE_SUPPORTED, Version.V_8_4_0), - entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0), - entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1), - entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0), - entry(TSDB_DOWNSAMPLING_STABLE, Version.V_8_10_0), - entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0), - entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0), - entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9), - entry(REPLICATION_OF_CLOSED_INDICES, Version.V_7_2_0), - entry(SOFT_DELETES_ENFORCED, Version.V_8_0_0), - entry(NEW_TRANSPORT_COMPRESSED_SETTING, Version.V_7_14_0), - 
entry(SERVICE_ACCOUNTS_SUPPORTED, Version.V_7_13_0), - entry(TRANSFORM_SUPPORTED, Version.V_7_2_0), - entry(SLM_SUPPORTED, Version.V_7_4_0), - entry(DATA_STREAMS_SUPPORTED, Version.V_7_9_0), - entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), - entry(DISABLE_FIELD_NAMES_FIELD_REMOVED, Version.V_8_0_0), - entry(ML_NLP_SUPPORTED, Version.V_8_0_0) - ); - } -} diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json index e69c2db6ff400..58e0ea5fd9fc5 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json @@ -21,7 +21,52 @@ "dict_dec":{ "type":"dictionary_decompounder", "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"] - } + }, + "hyphenation_dec_only_longest_match": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "only_longest_match": true + }, + "hyphenation_dec_no_sub_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_sub_matches": true + }, + "hyphenation_dec_no_overlapping_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_overlapping_matches": true + } }, "analyzer":{ "standard":{ @@ -47,6 +92,18 @@ "decompoundingAnalyzer":{ "tokenizer":"standard", "filter":["dict_dec"] + }, + "hyphenationDecompoundingAnalyzerOnlyLongestMatch":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_only_longest_match"] + }, + "hyphenationDecompoundingAnalyzerNoSubMatches": { + "tokenizer":"standard", + "filter":["hyphenation_dec_no_sub_matches"] + }, + "hyphenationDecompoundingAnalyzerNoOverlappingMatches":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_no_overlapping_matches"] } } } diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml index 82f933296a314..095b27e0fa071 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml @@ -15,6 +15,21 @@ index : dict_dec : type : dictionary_decompounder word_list : [donau, dampf, schiff, spargel, creme, suppe] + hyphenation_dec_only_longest_match : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + only_longest_match : true + hyphenation_dec_no_sub_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_sub_matches : true + hyphenation_dec_no_overlapping_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_overlapping_matches: true analyzer : standard : type : standard @@ -37,3 +52,13 @@ index : decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] + 
hyphenationDecompoundingAnalyzerOnlyLongestMatch : + tokenizer : standard + filter : [hyphenation_dec_only_longest_match] + hyphenationDecompoundingAnalyzerNoSubMatches: + tokenizer: standard + filter : [hyphenation_dec_no_sub_matches] + hyphenationDecompoundingAnalyzerNoOverlappingMatches: + tokenizer: standard + filter : [hyphenation_dec_no_overlapping_matches] + diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle index 381c0cd6dd044..85a199af2d477 100644 --- a/test/immutable-collections-patch/build.gradle +++ b/test/immutable-collections-patch/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.java' diff --git a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java similarity index 69% rename from test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java rename to test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java index 3ffa27126fac8..3a090a1b3fadc 100644 --- a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java +++ b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java @@ -9,9 +9,8 @@ package org.elasticsearch.extractor.features; -import org.elasticsearch.Version; -import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.XContentGenerator; @@ -25,14 +24,12 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.ServiceLoader; import java.util.Set; import java.util.stream.Stream; -public class HistoricalFeaturesMetadataExtractor { +public class ClusterFeaturesMetadataExtractor { private final ClassLoader classLoader; static { @@ -40,7 +37,7 @@ public class HistoricalFeaturesMetadataExtractor { LogConfigurator.configureESLogging(); } - public HistoricalFeaturesMetadataExtractor(ClassLoader classLoader) { + public ClusterFeaturesMetadataExtractor(ClassLoader classLoader) { this.classLoader = classLoader; } @@ -56,9 +53,7 @@ public static void main(String[] args) { printUsageAndExit(); } - new HistoricalFeaturesMetadataExtractor(HistoricalFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile( - outputFile - ); + new ClusterFeaturesMetadataExtractor(ClusterFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(outputFile); } public void generateMetadataFile(Path outputFile) { @@ -67,13 +62,7 @@ public void generateMetadataFile(Path outputFile) { XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os) ) { generator.writeStartObject(); - extractHistoricalFeatureMetadata((historical, names) -> { - generator.writeFieldName("historical_features"); - generator.writeStartObject(); - for (Map.Entry entry : historical.entrySet()) { - 
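Note: the three new hyphenation_decompounder variants differ only in which decompounded sub-tokens they keep. A sketch of building the same analysis chain programmatically, assuming the standard index.analysis settings keys; filter and analyzer names mirror the fixtures above:

    Settings analysis = Settings.builder()
        .put("index.analysis.filter.hyph_dec.type", "hyphenation_decompounder")
        .put("index.analysis.filter.hyph_dec.hyphenation_patterns_path", "de_DR.xml")
        .putList("index.analysis.filter.hyph_dec.word_list", "fuss", "fussball", "ballpumpe", "ball", "pumpe")
        .put("index.analysis.filter.hyph_dec.only_longest_match", true) // or no_sub_matches / no_overlapping_matches
        .put("index.analysis.analyzer.hyph.tokenizer", "standard")
        .putList("index.analysis.analyzer.hyph.filter", "hyph_dec")
        .build();
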
generator.writeStringField(entry.getKey().id(), entry.getValue().toString()); - } - generator.writeEndObject(); + extractClusterFeaturesMetadata(names -> { generator.writeFieldName("feature_names"); generator.writeStartArray(); for (var entry : names) { @@ -87,22 +76,19 @@ } } - void extractHistoricalFeatureMetadata(CheckedBiConsumer<Map<NodeFeature, Version>, Set<String>, IOException> metadataConsumer) - throws IOException { - Map<NodeFeature, Version> historicalFeatures = new HashMap<>(); + void extractClusterFeaturesMetadata(CheckedConsumer<Set<String>, IOException> metadataConsumer) throws IOException { Set<String> featureNames = new HashSet<>(); ServiceLoader<FeatureSpecification> featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader); for (FeatureSpecification featureSpecification : featureSpecLoader) { - historicalFeatures.putAll(featureSpecification.getHistoricalFeatures()); Stream.concat(featureSpecification.getFeatures().stream(), featureSpecification.getTestFeatures().stream()) .map(NodeFeature::id) .forEach(featureNames::add); } - metadataConsumer.accept(historicalFeatures, featureNames); + metadataConsumer.accept(featureNames); } private static void printUsageAndExit() { - System.err.println("Usage: HistoricalFeaturesMetadataExtractor "); + System.err.println("Usage: ClusterFeaturesMetadataExtractor "); System.exit(1); } } diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java similarity index 66% rename from test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java rename to test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java index e230982073699..af69aaff86cc5 100644 --- a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java @@ -9,8 +9,6 @@ package org.elasticsearch.extractor.features; -import org.elasticsearch.Version; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; @@ -21,34 +19,26 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; -import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; -public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase { +public class ClusterFeaturesMetadataExtractorTests extends ESTestCase { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); public void testExtractHistoricalMetadata() throws IOException { - HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader()); - Map<NodeFeature, Version> nodeFeatureVersionMap = new HashMap<>(); + ClusterFeaturesMetadataExtractor
extractor = new ClusterFeaturesMetadataExtractor(this.getClass().getClassLoader()); Set featureNamesSet = new HashSet<>(); - extractor.extractHistoricalFeatureMetadata((historical, names) -> { - nodeFeatureVersionMap.putAll(historical); - featureNamesSet.addAll(names); - }); - assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + extractor.extractClusterFeaturesMetadata(featureNamesSet::addAll); assertThat(featureNamesSet, not(empty())); assertThat(featureNamesSet, hasItem("test_features_enabled")); @@ -56,11 +46,7 @@ public void testExtractHistoricalMetadata() throws IOException { extractor.generateMetadataFile(outputFile); try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) { Map parsedMap = parser.map(); - assertThat(parsedMap, hasKey("historical_features")); assertThat(parsedMap, hasKey("feature_names")); - @SuppressWarnings("unchecked") - Map historicalFeaturesMap = (Map) (parsedMap.get("historical_features")); - nodeFeatureVersionMap.forEach((key, value) -> assertThat(historicalFeaturesMap, hasEntry(key.id(), value.toString()))); @SuppressWarnings("unchecked") Collection featureNamesList = (Collection) (parsedMap.get("feature_names")); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index f996db92e57f4..11787866af0d7 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,9 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/test/test-clusters/src/main/resources/fips/fips_java.policy b/test/test-clusters/src/main/resources/fips/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/test/test-clusters/src/main/resources/fips/fips_java.policy +++ b/test/test-clusters/src/main/resources/fips/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase 
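Note: the renamed extractor is driven the same way the updated test drives it. A standalone sketch mirroring that setup; the output path is illustrative, and the caller is assumed to handle IOException:

    ClusterFeaturesMetadataExtractor extractor = new ClusterFeaturesMetadataExtractor(
        ClusterFeaturesMetadataExtractor.class.getClassLoader()
    );
    Set<String> featureNames = new HashSet<>();
    extractor.extractClusterFeaturesMetadata(featureNames::addAll);
    extractor.generateMetadataFile(Paths.get("features-metadata.json"));
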
"file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 86c3f42a6a8ec..627554f6b261d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -20,8 +20,8 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; @@ -371,13 +371,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx ? executionContext.getClientYamlTestCandidate().getTestPath() : null; - var fixedProductionHeader = executionContext.clusterHasFeature( - RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id(), - false - ); - if (fixedProductionHeader) { - checkElasticProductHeader(response.getHeaders("X-elastic-product")); - } + checkElasticProductHeader(response.getHeaders("X-elastic-product")); checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { checkResponseException(e, executionContext); @@ -502,6 +496,8 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } + unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); + if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index ddc075cc9adcc..00f28b4badc3d 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 0f732d2017c74..c6bfb5b1b2778 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -122,17 +122,16 @@ public InternalAggregations getAggregations() { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), getKey()); builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - if (getShowDocCountError()) { + if (showDocCountError) { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); } aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -589,7 +588,7 @@ public List getBuckets() { @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 85882a5c56851..1691aedf543f4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -20,6 +20,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; @@ -235,57 +237,62 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalMultiTerms.Bucket[][] topBucketsPerOrd = new InternalMultiTerms.Bucket[owningBucketOrds.length][]; - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try ( - ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - InternalMultiTerms.Bucket spare = null; - BytesRef spareKey = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); 
- while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); - spareKey = new BytesRef(); + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + try ( + ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + InternalMultiTerms.Bucket spare = null; + BytesRef spareKey = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); + spareKey = new BytesRef(); + } + ordsEnum.readValue(spareKey); + spare.terms = unpackTerms(spareKey); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); } - ordsEnum.readValue(spareKey); - spare.terms = unpackTerms(spareKey); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; - topBucketsPerOrd[ordIdx] = bucketsForOrd; - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd[ordIdx][b] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + // Get the top buckets + InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + for (int b = (int) ordered.size() - 1; b >= 0; --b) { + InternalMultiTerms.Bucket[] buckets = topBucketsPerOrd.get(ordIdx); + buckets[b] = ordered.pop(); + otherDocCounts.increment(ordIdx, -buckets[b].getDocCount()); + } } } - } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] topBuckets) { @@ -305,7 +312,7 @@ InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] t 
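Note: the rewrite above swaps the plain long[] / Bucket[][] scratch space for circuit-breaker-accounted big arrays released by try-with-resources. A schematic of just that allocation pattern, using the same BigArrays calls as the hunk; numOwningOrds, docCount, and size are illustrative:

    try (
        LongArray otherDocCounts = bigArrays().newLongArray(numOwningOrds, true); // zero-initialized
        ObjectArray<InternalMultiTerms.Bucket[]> topBucketsPerOrd = bigArrays().newObjectArray(numOwningOrds)
    ) {
        otherDocCounts.increment(0, docCount);                 // replaces otherDocCounts[0] += docCount
        topBucketsPerOrd.set(0, new InternalMultiTerms.Bucket[size]);
    } // both arrays are released here, even on failure, instead of sitting untracked on the heap
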
bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), 0, formats, keyConverters, diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 466c9e4f006dc..228ac401b96bb 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -33,8 +32,7 @@ public APMIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super( nodeSettings, @@ -42,7 +40,6 @@ public APMIndexTemplateRegistry( threadPool, client, xContentRegistry, - featureService, templateFilter(isDataStreamsLifecycleOnlyMode(clusterService.getSettings())) ); } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index aefb45f6186c1..0be95c337838a 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); registry.set( - new APMIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { APMIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml index ac6462c86676c..a5a3a7433f4c1 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml @@ -4,6 +4,7 @@ _meta: managed: true template: mappings: + date_detection: false dynamic: true dynamic_templates: - numeric_labels: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java index 476b504339e62..b18e95b55dde0 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java @@ -14,8 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import 
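The `buildAggregations` rewrite above swaps plain `long[]`/`Bucket[][]` arrays for `LongArray`/`ObjectArray` buffers allocated from `BigArrays`, so the per-ordinal accounting participates in circuit-breaker tracking and is released deterministically. A minimal sketch of that allocation pattern, assuming only an elasticsearch artifact on the classpath (the aggregator wiring is omitted, and `NON_RECYCLING_INSTANCE` is used purely for illustration since it skips breaker accounting):

```java
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;

public class BigArraysSketch {
    public static void main(String[] args) {
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
        // second argument `true` asks for zero-initialized slots
        try (LongArray otherDocCounts = bigArrays.newLongArray(4, true)) {
            otherDocCounts.increment(0, 42);           // accumulate a per-ordinal count
            otherDocCounts.increment(0, -2);           // subtract counts moved to top buckets
            System.out.println(otherDocCounts.get(0)); // 40
        } // close() releases the backing pages; a breaker-backed instance also un-accounts them
    }
}
```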
diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java
index 466c9e4f006dc..228ac401b96bb 100644
--- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java
+++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java
@@ -10,7 +10,6 @@
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.core.ClientHelper;
@@ -33,8 +32,7 @@ public APMIndexTemplateRegistry(
         ClusterService clusterService,
         ThreadPool threadPool,
         Client client,
-        NamedXContentRegistry xContentRegistry,
-        FeatureService featureService
+        NamedXContentRegistry xContentRegistry
     ) {
         super(
             nodeSettings,
@@ -42,7 +40,6 @@ public APMIndexTemplateRegistry(
             threadPool,
             client,
             xContentRegistry,
-            featureService,
             templateFilter(isDataStreamsLifecycleOnlyMode(clusterService.getSettings()))
         );
     }
diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java
index aefb45f6186c1..0be95c337838a 100644
--- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java
+++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java
@@ -48,14 +48,7 @@ public Collection<?> createComponents(PluginServices services) {
         Settings settings = services.environment().settings();
         ClusterService clusterService = services.clusterService();
         registry.set(
-            new APMIndexTemplateRegistry(
-                settings,
-                clusterService,
-                services.threadPool(),
-                services.client(),
-                services.xContentRegistry(),
-                services.featureService()
-            )
+            new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry())
         );
         if (enabled) {
             APMIndexTemplateRegistry registryInstance = registry.get();
diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml
index ac6462c86676c..a5a3a7433f4c1 100644
--- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml
+++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml
@@ -4,6 +4,7 @@ _meta:
   managed: true
 template:
   mappings:
+    date_detection: false
     dynamic: true
     dynamic_templates:
       - numeric_labels:
diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java
index 476b504339e62..b18e95b55dde0 100644
--- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java
+++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java
@@ -14,8 +14,6 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.datastreams.DataStreamFeatures;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -24,7 +22,6 @@
 import org.junit.After;
 import org.junit.Before;
 
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -56,15 +53,13 @@ public void createRegistryAndClient() {
             additionalSettings,
             clusterSettings
         );
-        FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures()));
 
         apmIndexTemplateRegistry = new APMIndexTemplateRegistry(
             Settings.EMPTY,
             clusterService,
             threadPool,
             client,
-            NamedXContentRegistry.EMPTY,
-            featureService
+            NamedXContentRegistry.EMPTY
         );
         apmIndexTemplateRegistry.setEnabled(true);
     }
diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java
index 4a2b9265b3b05..32e7c2225e19d 100644
--- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java
+++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.xpack.apmdata;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
@@ -30,8 +29,6 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.datastreams.DataStreamFeatures;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.ingest.IngestMetadata;
 import org.elasticsearch.ingest.PipelineConfiguration;
 import org.elasticsearch.test.ClusterServiceUtils;
@@ -92,9 +89,8 @@ public void createRegistryAndClient() {
         threadPool = new TestThreadPool(this.getClass().getName());
         client = new VerifyingClient(threadPool);
         ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings);
-        FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures()));
         stackTemplateRegistryAccessor = new StackTemplateRegistryAccessor(
-            new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY, featureService)
+            new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY)
         );
 
         apmIndexTemplateRegistry = new APMIndexTemplateRegistry(
@@ -102,8 +98,7 @@ public void createRegistryAndClient() {
             clusterService,
             threadPool,
             client,
-            NamedXContentRegistry.EMPTY,
-            featureService
+            NamedXContentRegistry.EMPTY
         );
         apmIndexTemplateRegistry.setEnabled(true);
     }
@@ -408,25 +403,6 @@ public void testIndexTemplateConventions() throws Exception {
         }
     }
 
-    public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() {
-        DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode");
-        DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0);
-        DiscoveryNodes nodes = DiscoveryNodes.builder()
-            .localNodeId("updatedNode")
-            .masterNodeId("updatedNode")
-            .add(updatedNode)
-            .add(outdatedNode)
-            .build();
-
-        client.setVerifier((a, r, l) -> {
-            fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen");
-            return null;
-        });
-
-        ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes);
-        apmIndexTemplateRegistry.clusterChanged(event);
-    }
-
     public void testILMComponentTemplatesInstalled() throws Exception {
         int ilmFallbackCount = 0;
         for (Map.Entry<String, ComponentTemplate> entry : apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet()) {
diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle
index c950646930779..eb758c2c0ef5e 100644
--- a/x-pack/plugin/async-search/qa/rest/build.gradle
+++ b/x-pack/plugin/async-search/qa/rest/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.base-internal-es-plugin'
 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle
index c79644ee31225..903e76fd986cf 100644
--- a/x-pack/plugin/autoscaling/qa/rest/build.gradle
+++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index e25d7fb359acb..26040529b04df 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -1,6 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 import org.elasticsearch.gradle.util.GradleUtils
 
@@ -81,7 +87,6 @@ tasks.named("precommit").configure {
 }
 
 tasks.named("yamlRestCompatTestTransform").configure({ task ->
-  task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility")
   task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.")
   task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs")
   task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry) snapshot version", "The number of functions is constantly increasing")
@@ -89,5 +94,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task ->
   task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.")
   task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.")
   task.skipTest("privileges/11_builtin/Test get builtin privileges" ,"unnecessary to test compatibility")
+  task.skipTest("esql/61_enrich_ip/Invalid IP strings", "We switched from exceptions to null+warnings for ENRICH runtime errors")
 })
diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle
index 4be504e616920..d5bc38d2e8dd5 100644
--- a/x-pack/plugin/ccr/qa/build.gradle
+++ b/x-pack/plugin/ccr/qa/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.java'
diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
index da39d221f92f1..86f974ed13359 100644
--- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
+++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
@@ -1,5 +1,16 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
+import org.elasticsearch.gradle.util.GradleUtils
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
@@ -11,6 +22,8 @@ dependencies {
   testImplementation project(':x-pack:plugin:ccr:qa')
 }
 
+def clusterPath = getPath()
+
 def leaderCluster = testClusters.register("leader-cluster") {
   testDistribution = 'DEFAULT'
   setting 'xpack.license.self_generated.type', 'trial'
@@ -24,7 +37,19 @@ def followCluster = testClusters.register("follow-cluster") {
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
-  setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def leaderUris = leaderInfo.map { it.getAllTransportPortURI() }
+
+  setting 'cluster.remote.leader_cluster.seeds',
+    { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register("leader-cluster", RestIntegTestTask) {
@@ -41,7 +66,7 @@ tasks.register("writeJavaPolicy") {
     policyFile.write(
       [
         "grant {",
-        "  permission java.io.FilePermission \"${-> testClusters."follow-cluster".getFirstNode().getServerLog()}\", \"read\";",
+        "  permission java.io.FilePermission \"${-> followCluster.map { it.getFirstNode().getServerLog() }.get()}\", \"read\";",
         "};"
       ].join("\n")
    )
@@ -50,11 +75,28 @@ tasks.register("follow-cluster", RestIntegTestTask) {
   dependsOn 'writeJavaPolicy', "leader-cluster"
-  useCluster leaderCluster
-  systemProperty 'tests.target_cluster', 'follow'
-  nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}"
-  nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0))
-  nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog())
+  useCluster leaderCluster
+  systemProperty 'tests.target_cluster', 'follow'
+  nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}"
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def followInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("follow-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def leaderUri = leaderInfo.map { it.getAllHttpSocketURI().get(0) }
+  def followerUri = followInfo.map { it.getAllHttpSocketURI().get(0) }
+
+  nonInputProperties.systemProperty 'tests.leader_host', leaderUri
+  nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog())
 }
 
 tasks.named("check").configure { dependsOn "follow-cluster" }
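These CCR build-script changes move cluster coordinates behind `TestClusterValueSource` providers backed by a shared build service, so seed addresses are computed when a task actually runs rather than being frozen into the configuration model. As a hedged orientation sketch of the underlying Gradle API (the class and parameter names below are illustrative, not the Elasticsearch implementation):

```java
import org.gradle.api.provider.Property;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;

// Illustrative ValueSource: Gradle instantiates it and supplies the parameters object,
// and the value is obtained lazily at execution time instead of at configuration time.
public abstract class ClusterUriValueSource
    implements ValueSource<String, ClusterUriValueSource.Params> {

    public interface Params extends ValueSourceParameters {
        Property<String> getClusterName(); // hypothetical parameter
    }

    @Override
    public String obtain() {
        // a real implementation would look the cluster up in a shared build service here
        return "127.0.0.1:9300#" + getParameters().getClusterName().get();
    }
}
```

A build script would then resolve it with `providers.of(ClusterUriValueSource) { it.parameters.clusterName.set("leader-cluster") }`, mirroring the `TestClusterValueSource` usage above.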
diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
index 2475a56aa87aa..61678784e6b38 100644
--- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
@@ -1,6 +1,17 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
+import org.elasticsearch.gradle.util.GradleUtils
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
@@ -12,6 +23,7 @@ dependencies {
   testImplementation project(':x-pack:plugin:ccr:qa')
 }
 
+def clusterPath = getPath()
 def leaderCluster = testClusters.register('leader-cluster') {
   testDistribution = 'DEFAULT'
   setting 'xpack.license.self_generated.type', 'trial'
@@ -21,12 +33,23 @@ def leaderCluster = testClusters.register('leader-cluster') {
 }
 
 def middleCluster = testClusters.register('middle-cluster') {
-  testDistribution = 'DEFAULT'
-  setting 'xpack.license.self_generated.type', 'trial'
-  setting 'xpack.security.enabled', 'true'
-  user username: 'admin', password: 'admin-password', role: 'superuser'
-  setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
+  testDistribution = 'DEFAULT'
+  setting 'xpack.license.self_generated.type', 'trial'
+  setting 'xpack.security.enabled', 'true'
+  user username: 'admin', password: 'admin-password', role: 'superuser'
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def leaderUris = leaderInfo.map { it.getAllTransportPortURI() }
+  setting 'cluster.remote.leader_cluster.seeds',
+    { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register("leader-cluster", RestIntegTestTask) {
@@ -40,30 +63,74 @@ tasks.register("middle-cluster", RestIntegTestTask) {
   useCluster testClusters.named("leader-cluster")
   systemProperty 'tests.target_cluster', 'middle'
   systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster"
-  nonInputProperties.systemProperty 'tests.leader_host',leaderCluster.map(c -> c.allHttpSocketURI.get(0))
-}
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+
+  def leaderUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+  nonInputProperties.systemProperty 'tests.leader_host', leaderUri
+}
 
 tasks.register('follow-cluster', RestIntegTestTask) {
   dependsOn "leader-cluster", "middle-cluster"
-  useCluster leaderCluster
-  useCluster middleCluster
-  systemProperty 'tests.target_cluster', 'follow'
-  systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster"
-  nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0))
-  nonInputProperties.systemProperty 'tests.middle_host', middleCluster.map(c -> c.allHttpSocketURI.get(0))
+  useCluster leaderCluster
+  useCluster middleCluster
+  systemProperty 'tests.target_cluster', 'follow'
+  systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster"
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+
+  def leaderUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+
+  def middleUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("middle-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+  nonInputProperties.systemProperty 'tests.leader_host', leaderUri
+  nonInputProperties.systemProperty 'tests.middle_host', middleUri
 }
 
-testClusters.matching {it.name == "follow-cluster" }.configureEach {
+testClusters.matching { it.name == "follow-cluster" }.configureEach {
   testDistribution = 'DEFAULT'
   setting 'xpack.monitoring.collection.enabled', 'true'
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderUris = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.getAllTransportPortURI() }
+
+  def middleUris = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("middle-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.getAllTransportPortURI() }
+
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
+    { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE
   setting 'cluster.remote.middle_cluster.seeds',
-    { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
+    { "\"${middleUris.get().join(",")}\"" }, IGNORE_VALUE
 }
diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java
index 53e068ae6126e..0bb4afe51b85a 100644
--- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java
+++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.rest.RestStatus;
@@ -366,8 +367,10 @@ public void testSyntheticSource() throws Exception {
         final String leaderIndexName = "synthetic_leader";
         if ("leader".equals(targetCluster)) {
             logger.info("Running against leader cluster");
-            createIndex(adminClient(), leaderIndexName, Settings.EMPTY, """
-                "_source": {"mode": "synthetic"},
+            Settings settings = Settings.builder()
+                .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC)
+                .build();
+            createIndex(adminClient(), leaderIndexName, settings, """
                 "properties": {"kwd": {"type": "keyword"}}}""", null);
             for (int i = 0; i < numDocs; i++) {
                 logger.info("Indexing doc [{}]", i);
@@ -392,7 +395,6 @@ public void testSyntheticSource() throws Exception {
         }
         assertBusy(() -> {
             verifyDocuments(client(), followIndexName, numDocs);
-            assertMap(getIndexMappingAsMap(followIndexName), matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")));
             if (overrideNumberOfReplicas) {
                 assertMap(getIndexSettingsAsMap(followIndexName), matchesMap().extraOk().entry("index.number_of_replicas", "0"));
             } else {
diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
index 7661ea08b057d..ff342accef277 100644
--- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
+++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
@@ -1,5 +1,9 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
+import org.elasticsearch.gradle.util.GradleUtils
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -10,6 +14,8 @@ dependencies {
   testImplementation project(':x-pack:plugin:ccr:qa:')
 }
 
+def clusterPath = getPath()
+
 def leaderCluster = testClusters.register('leader-cluster') {
   testDistribution = 'DEFAULT'
   setting 'xpack.security.enabled', 'true'
@@ -21,8 +27,20 @@ def followerCluster = testClusters.register('follow-cluster') {
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def leaderUris = leaderInfo.map { it.getAllTransportPortURI() }
+
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
+    { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register('leader-cluster', RestIntegTestTask) {
@@ -34,7 +52,19 @@ tasks.register('follow-cluster', RestIntegTestTask) {
   dependsOn 'leader-cluster'
   useCluster leaderCluster
   systemProperty 'tests.target_cluster', 'follow'
-  nonInputProperties.systemProperty 'tests.leader_host', followerCluster.map(c -> c.allHttpSocketURI.get(0))
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def followInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("follow-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def followUri = followInfo.map { it.allHttpSocketURI.get(0) }
+
+  nonInputProperties.systemProperty 'tests.leader_host', followUri
 }
 
 tasks.named("check").configure { dependsOn "follow-cluster" }
diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle
index 47d37801e2dcf..848beb1da10ae 100644
--- a/x-pack/plugin/ccr/qa/restart/build.gradle
+++ b/x-pack/plugin/ccr/qa/restart/build.gradle
@@ -1,6 +1,10 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
+import org.elasticsearch.gradle.util.GradleUtils
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -9,6 +13,8 @@ dependencies {
   testImplementation project(':x-pack:plugin:ccr:qa')
 }
 
+def clusterPath = getPath()
+
 def leaderCluster = testClusters.register('leader-cluster') {
   testDistribution = 'DEFAULT'
   setting 'xpack.license.self_generated.type', 'trial'
@@ -22,12 +28,23 @@ def followCluster = testClusters.register('follow-cluster') {
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderInfo = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }
+  def leaderUri = leaderInfo.map { it.getAllTransportPortURI().get(0) }
+
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE
+    { "\"${leaderUri.get()}\"" }, IGNORE_VALUE
   nameCustomization = { 'follow' }
 }
 
-
 tasks.register('leader-cluster', RestIntegTestTask) {
   mustRunAfter("precommit")
   systemProperty 'tests.target_cluster', 'leader'
@@ -37,8 +54,19 @@ tasks.register('follow-cluster', RestIntegTestTask) {
   dependsOn 'leader-cluster'
   useCluster leaderCluster
   systemProperty 'tests.target_cluster', 'follow'
+
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+
   nonInputProperties.systemProperty 'tests.leader_host',
-    "${-> leaderCluster.get().getAllHttpSocketURI().get(0)}"
+    "${-> leaderUri.get() }"
 }
 
 tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) {
@@ -48,10 +76,27 @@ tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) {
   systemProperty 'tests.rest.load_packaged', 'false'
   systemProperty 'tests.target_cluster', 'follow-restart'
 
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+
+  def followUris = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("follow-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.join(",") }
+
+  nonInputProperties.systemProperty 'tests.leader_host', leaderUri
+  nonInputProperties.systemProperty 'tests.rest.cluster', followUris
+
   doFirst {
-    followCluster.get().restart()
-    nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c-> c.getAllHttpSocketURI().get(0))
-    nonInputProperties.systemProperty 'tests.rest.cluster', followCluster.map(c -> c.getAllHttpSocketURI().join(","))
+    serviceProvider.get().restart(clusterPath, "follow-cluster")
   }
 }
diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle
index 5515aefeaa091..454a9ae721736 100644
--- a/x-pack/plugin/ccr/qa/security/build.gradle
+++ b/x-pack/plugin/ccr/qa/security/build.gradle
@@ -1,4 +1,9 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import org.elasticsearch.gradle.testclusters.TestClusterValueSource
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestClustersRegistry
+import org.elasticsearch.gradle.util.GradleUtils
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
@@ -10,26 +15,38 @@ dependencies {
   testImplementation project(':x-pack:plugin:ccr:qa')
 }
 
+def clusterPath = getPath()
+
 def leadCluster = testClusters.register('leader-cluster') {
-  testDistribution = 'DEFAULT'
-  setting 'xpack.license.self_generated.type', 'trial'
-  setting 'xpack.security.enabled', 'true'
-  extraConfigFile 'roles.yml', file('leader-roles.yml')
-  user username: "test_admin", role: "superuser"
-  user username: "test_ccr", role: "ccruser"
+  testDistribution = 'DEFAULT'
+  setting 'xpack.license.self_generated.type', 'trial'
+  setting 'xpack.security.enabled', 'true'
+  extraConfigFile 'roles.yml', file('leader-roles.yml')
+  user username: "test_admin", role: "superuser"
+  user username: "test_ccr", role: "ccruser"
 }
 
 testClusters.register('follow-cluster') {
-  testDistribution = 'DEFAULT'
-  setting 'cluster.remote.leader_cluster.seeds', {
-    "\"${leadCluster.get().getAllTransportPortURI().join(",")}\""
-  }, IGNORE_VALUE
-  setting 'xpack.license.self_generated.type', 'trial'
-  setting 'xpack.security.enabled', 'true'
-  setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests
-  extraConfigFile 'roles.yml', file('follower-roles.yml')
-  user username: "test_admin", role: "superuser"
-  user username: "test_ccr", role: "ccruser"
+  testDistribution = 'DEFAULT'
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderUris = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.AllTransportPortURI }
+
+  setting 'cluster.remote.leader_cluster.seeds', {
+    "\"${leaderUris.get().join(",")}\""
+  }, IGNORE_VALUE
+  setting 'xpack.license.self_generated.type', 'trial'
+  setting 'xpack.security.enabled', 'true'
+  setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests
+  extraConfigFile 'roles.yml', file('follower-roles.yml')
+  user username: "test_admin", role: "superuser"
+  user username: "test_ccr", role: "ccruser"
 }
 
 tasks.register('leader-cluster', RestIntegTestTask) {
@@ -41,7 +58,17 @@ def followerClusterTestTask = tasks.register('follow-cluster', RestIntegTestTask) {
   dependsOn 'leader-cluster'
   useCluster leadCluster
   systemProperty 'tests.target_cluster', 'follow'
-  nonInputProperties.systemProperty 'tests.leader_host', leadCluster.map(c-> c.getAllHttpSocketURI().get(0))
+  Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService(
+    project.gradle.sharedServices,
+    TestClustersPlugin.REGISTRY_SERVICE_NAME
+  )
+  def leaderUri = project.getProviders().of(TestClusterValueSource.class) {
+    it.parameters.path.set(clusterPath)
+    it.parameters.clusterName.set("leader-cluster")
+    it.parameters.service = serviceProvider
+  }.map { it.allHttpSocketURI.get(0) }
+
+  nonInputProperties.systemProperty 'tests.leader_host', leaderUri
 }
 
 tasks.named("check").configure { dependsOn(followerClusterTestTask) }
diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle
index b4f17cb436df5..51d770936e64e 100644
--- a/x-pack/plugin/core/build.gradle
+++ b/x-pack/plugin/core/build.gradle
@@ -1,7 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.apache.tools.ant.filters.ReplaceTokens
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.Version
-
 import java.nio.file.Paths
 
 apply plugin: 'elasticsearch.internal-es-plugin'
@@ -65,7 +70,7 @@ dependencies {
   testImplementation project(path: ':modules:rest-root')
   testImplementation project(path: ':modules:health-shards-availability')
   // Needed for Fips140ProviderVerificationTests
-  testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4')
+  testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5')
 
   testImplementation(project(':x-pack:license-tools')) {
     transitive = false
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java
index 56c8e87d1c502..d86c15aa14bc9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java
@@ -136,11 +136,11 @@ public boolean equals(Object o) {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
         LicensedFeature that = (LicensedFeature) o;
-        return Objects.equals(name, that.name);
+        return Objects.equals(name, that.name) && Objects.equals(family, that.family);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(name);
+        return Objects.hash(name, family);
     }
 }
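The `LicensedFeature` fix above restores the equals/hashCode contract: once `family` participates in `equals`, it must also feed `hashCode`, otherwise two features that differ only by family compare unequal yet hash identically, and hash-based maps treat them inconsistently. A self-contained illustration with simplified stand-in types (not the Elasticsearch classes):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

final class FeatureKey {
    final String family;
    final String name;

    FeatureKey(String family, String name) {
        this.family = family;
        this.name = name;
    }

    @Override
    public boolean equals(Object o) {
        // both fields compared, so both must be hashed below
        return o instanceof FeatureKey k && Objects.equals(family, k.family) && Objects.equals(name, k.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(family, name);
    }

    public static void main(String[] args) {
        Map<FeatureKey, Long> lastUsed = new HashMap<>();
        lastUsed.put(new FeatureKey("familyA", "goldFeature"), 100L);
        lastUsed.put(new FeatureKey("familyB", "goldFeature"), 200L);
        System.out.println(lastUsed.size()); // 2 -- tracked separately per family
    }
}
```

This is exactly the behavior the new `testLastUsedMomentaryFeatureWithSameNameDifferentFamily` test later in this patch asserts.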
"\"${leaderUris.get().join(",")}\"" + }, IGNORE_VALUE + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests + extraConfigFile 'roles.yml', file('follower-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } tasks.register('leader-cluster', RestIntegTestTask) { @@ -41,7 +58,17 @@ def followerClusterTestTask = tasks.register('follow-cluster', RestIntegTestTask dependsOn 'leader-cluster' useCluster leadCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', leadCluster.map(c-> c.getAllHttpSocketURI().get(0)) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri } tasks.named("check").configure { dependsOn(followerClusterTestTask) } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index b4f17cb436df5..51d770936e64e 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -1,7 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.Version - import java.nio.file.Paths apply plugin: 'elasticsearch.internal-es-plugin' @@ -65,7 +70,7 @@ dependencies { testImplementation project(path: ':modules:rest-root') testImplementation project(path: ':modules:health-shards-availability') // Needed for Fips140ProviderVerificationTests - testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4') + testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5') testImplementation(project(':x-pack:license-tools')) { transitive = false diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index 56c8e87d1c502..d86c15aa14bc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -136,11 +136,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LicensedFeature that = (LicensedFeature) o; - return Objects.equals(name, that.name); + return Objects.equals(name, that.name) && Objects.equals(family, that.family); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(name, family); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java index 06393dfa3bade..155ea0ffdcbc3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.stats.HealthApiStatsAction; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -30,10 +28,7 @@ */ public class HealthApiUsageTransportAction extends XPackUsageFeatureTransportAction { - static final NodeFeature SUPPORTS_HEALTH_STATS = new NodeFeature("health.supports_health_stats"); - private final Client client; - private final FeatureService featureService; @Inject public HealthApiUsageTransportAction( @@ -42,8 +37,7 @@ public HealthApiUsageTransportAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, - FeatureService featureService + Client client ) { super( XPackUsageFeatureAction.HEALTH.name(), @@ -54,7 +48,6 @@ public HealthApiUsageTransportAction( indexNameExpressionResolver ); this.client = client; - this.featureService = featureService; } @Override @@ -70,7 +63,7 @@ protected void masterOperation( client.threadPool().getThreadContext() ); - if (state.clusterRecovered() && featureService.clusterHasFeature(state, SUPPORTS_HEALTH_STATS)) { + if (state.clusterRecovered()) { HealthApiStatsAction.Request statsRequest = new HealthApiStatsAction.Request(); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.execute(HealthApiStatsAction.INSTANCE, statsRequest, preservingListener.delegateFailureAndWrap((l, r) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index b885a90c30e57..f966bf97f4764 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.core; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.license.License; import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; -import java.util.Map; import java.util.Set; /** @@ -32,9 +30,4 @@ public Set getFeatures() { LOGSDB_TELMETRY_STATS ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(HealthApiUsageTransportAction.SUPPORTS_HEALTH_STATS, Version.V_8_7_0); - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index cd44aaafbfae2..05eb7551330b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -130,11 +130,11 @@ public boolean equals(Object obj) { * still result in unique snapshot names. 
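`resolveDateMathExpression(String, long)` is the same resolver Elasticsearch uses for date-math index names; threading an explicit `now` through `generateSnapshotName` makes results deterministic in tests while production simply passes `System.currentTimeMillis()`. A small sketch of the call (assuming an elasticsearch dependency on the classpath; the wrapper class is illustrative):

```java
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;

public class DateMathSketch {
    public static void main(String[] args) {
        long fixedTime = 1552684140000L; // 2019-03-15T21:09:00Z, matching the test updated below
        String resolved = IndexNameExpressionResolver.resolveDateMathExpression("<name-{now/d}>", fixedTime);
        System.out.println(resolved); // name-2019.03.15
    }
}
```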
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
index f8cb371687d72..26f4f5c92073c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
@@ -58,10 +58,22 @@ public class IndexLifecycleMetadata implements Metadata.Custom {
     private final Map<String, LifecyclePolicyMetadata> policyMetadatas;
     private final OperationMode operationMode;
 
+    // a slightly different view of the policyMetadatas -- it's hot in a couple of places so we pre-calculate it
+    private final Map<String, LifecyclePolicy> policies;
+
+    private static Map<String, LifecyclePolicy> policiesMap(final Map<String, LifecyclePolicyMetadata> policyMetadatas) {
+        final Map<String, LifecyclePolicy> policies = new HashMap<>(policyMetadatas.size());
+        for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) {
+            LifecyclePolicy policy = policyMetadata.getPolicy();
+            policies.put(policy.getName(), policy);
+        }
+        return Collections.unmodifiableMap(policies);
+    }
+
     public IndexLifecycleMetadata(Map<String, LifecyclePolicyMetadata> policies, OperationMode operationMode) {
         this.policyMetadatas = Collections.unmodifiableMap(policies);
         this.operationMode = operationMode;
+        this.policies = policiesMap(policyMetadatas);
     }
 
     public IndexLifecycleMetadata(StreamInput in) throws IOException {
@@ -72,6 +84,7 @@ public IndexLifecycleMetadata(StreamInput in) throws IOException {
         }
         this.policyMetadatas = policies;
         this.operationMode = in.readEnum(OperationMode.class);
+        this.policies = policiesMap(policyMetadatas);
     }
 
     @Override
@@ -93,13 +106,7 @@ public OperationMode getOperationMode() {
     }
 
     public Map<String, LifecyclePolicy> getPolicies() {
-        // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph
-        Map<String, LifecyclePolicy> policies = new HashMap<>(policyMetadatas.size());
-        for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) {
-            LifecyclePolicy policy = policyMetadata.getPolicy();
-            policies.put(policy.getName(), policy);
-        }
-        return Collections.unmodifiableMap(policies);
+        return policies;
     }
 
     @Override
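The `IndexLifecycleMetadata` change is a standard hot-path trade: because the object is immutable, the derived name-to-policy view can be built once at construction and returned by reference thereafter. A simplified stand-alone version of the idea (types are stand-ins for the lifecycle classes):

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

final class PolicyHolder {
    record Policy(String name) {}
    record PolicyMetadata(Policy policy) {}

    private final Map<String, PolicyMetadata> policyMetadatas;
    private final Map<String, Policy> policies; // derived once, shared thereafter

    PolicyHolder(Map<String, PolicyMetadata> policyMetadatas) {
        this.policyMetadatas = Collections.unmodifiableMap(policyMetadatas);
        Map<String, Policy> derived = new HashMap<>(policyMetadatas.size());
        for (PolicyMetadata metadata : policyMetadatas.values()) {
            derived.put(metadata.policy().name(), metadata.policy());
        }
        this.policies = Collections.unmodifiableMap(derived);
    }

    Map<String, Policy> getPolicies() {
        return policies; // O(1): no per-call allocation or copying
    }
}
```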
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
index f4ac89124cddb..68e0f7e1ac885 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java
@@ -76,12 +76,6 @@ public TextExpansionConfig(
         this.vocabularyConfig = Optional.ofNullable(vocabularyConfig)
             .orElse(new VocabularyConfig(InferenceIndexConstants.nativeDefinitionStore()));
         this.tokenization = tokenization == null ? Tokenization.createDefault() : tokenization;
-        if (this.tokenization instanceof BertTokenization == false) {
-            throw ExceptionsHelper.badRequestException(
-                "text expansion models must be configured with BERT tokenizer, [{}] given",
-                this.tokenization.getName()
-            );
-        }
         this.resultsField = resultsField;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java
index 83f7832645270..ad8a55a5f8443 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java
@@ -51,7 +51,7 @@ public static int countInferenceProcessors(ClusterState state) {
         }
         Counter counter = Counter.newCounter();
         ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> {
-            Map<String, Object> configMap = configuration.getConfigAsMap();
+            Map<String, Object> configMap = configuration.getConfig();
             List<Map<String, Object>> processorConfigs = (List<Map<String, Object>>) configMap.get(PROCESSORS_KEY);
             for (Map<String, Object> processorConfigWithKey : processorConfigs) {
                 for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) {
@@ -73,7 +73,7 @@ public static Set<String> getModelIdsFromInferenceProcessors(IngestMetadata ingestMetadata) {
 
         Set<String> modelIds = new LinkedHashSet<>();
         ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> {
-            Map<String, Object> configMap = configuration.getConfigAsMap();
+            Map<String, Object> configMap = configuration.getConfig();
             List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY);
             for (Map<String, Object> processorConfigWithKey : processorConfigs) {
                 for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) {
@@ -100,7 +100,7 @@ public static Map<String, Set<String>> pipelineIdsByResource(ClusterState state, Set<String> ids) {
             return pipelineIdsByModelIds;
         }
         ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> {
-            Map<String, Object> configMap = configuration.getConfigAsMap();
+            Map<String, Object> configMap = configuration.getConfig();
             List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY);
             for (Map<String, Object> processorConfigWithKey : processorConfigs) {
                 for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) {
@@ -131,7 +131,7 @@ public static Set<String> pipelineIdsForResource(ClusterState state, Set<String> ids) {
             return pipelineIds;
         }
         ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> {
-            Map<String, Object> configMap = configuration.getConfigAsMap();
+            Map<String, Object> configMap = configuration.getConfig();
             List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY);
             for (Map<String, Object> processorConfigWithKey : processorConfigs) {
                 for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) {
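All four touched methods in `InferenceProcessorInfoExtractor` share one traversal shape: a pipeline's config map holds a `processors` list whose elements are single-entry maps keyed by processor type. A minimal sketch of that walk (helper names are hypothetical, not the Elasticsearch utilities):

```java
import java.util.List;
import java.util.Map;

public class ProcessorCountSketch {
    @SuppressWarnings("unchecked")
    static int countProcessors(Map<String, Object> pipelineConfig, String type) {
        List<Map<String, Object>> processors = (List<Map<String, Object>>) pipelineConfig.get("processors");
        if (processors == null) {
            return 0;
        }
        int count = 0;
        // each list element is a single-entry map: { "<processor type>": { ...config... } }
        for (Map<String, Object> processorWithKey : processors) {
            for (Map.Entry<String, Object> entry : processorWithKey.entrySet()) {
                if (type.equals(entry.getKey())) {
                    count++;
                }
            }
        }
        return count;
    }

    public static void main(String[] args) {
        Map<String, Object> pipeline = Map.of("processors", List.of(Map.of("inference", Map.of()), Map.of("set", Map.of())));
        System.out.println(countProcessors(pipeline, "inference")); // 1
    }
}
```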
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
index 259e66f633bac..cc589b53eaa1a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
@@ -484,6 +484,22 @@ static RoleDescriptor kibanaSystem(String name) {
             // Endpoint heartbeat. Kibana reads from these to determine metering/billing for
             // endpoints.
             RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read", "create_index").build(),
+            // Security Solution workflows insights. Kibana creates, manages, and uses these
+            // to provide users with insights on potential configuration improvements
+            RoleDescriptor.IndicesPrivileges.builder()
+                .indices(".edr-workflow-insights-*")
+                .privileges(
+                    "create_index",
+                    "auto_configure",
+                    "manage",
+                    "read",
+                    "write",
+                    "delete",
+                    TransportUpdateSettingsAction.TYPE.name(),
+                    TransportPutMappingAction.TYPE.name(),
+                    RolloverAction.NAME
+                )
+                .build(),
             // For connectors telemetry. Will be removed once we switched to connectors API
             RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() },
         null,
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
index 2380c13e147d5..fc14ec6811014 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java
@@ -402,67 +402,6 @@ private static Map<String, RoleDescriptor> initializeReservedRoles() {
                     "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n"
                 )
             ),
-            entry(
-                "apm_user",
-                new RoleDescriptor(
-                    "apm_user",
-                    null,
-                    new RoleDescriptor.IndicesPrivileges[] {
-                        // Self managed APM Server
-                        // Can be removed in 8.0
-                        RoleDescriptor.IndicesPrivileges.builder().indices("apm-*").privileges("read", "view_index_metadata").build(),
-
-                        // APM Server under fleet (data streams)
-                        RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm.*").privileges("read", "view_index_metadata").build(),
-                        RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm-*").privileges("read", "view_index_metadata").build(),
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices("metrics-apm.*")
-                            .privileges("read", "view_index_metadata")
-                            .build(),
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices("metrics-apm-*")
-                            .privileges("read", "view_index_metadata")
-                            .build(),
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices("traces-apm.*")
-                            .privileges("read", "view_index_metadata")
-                            .build(),
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices("traces-apm-*")
-                            .privileges("read", "view_index_metadata")
-                            .build(),
-
-                        // Machine Learning indices. Only needed for legacy reasons
-                        // Can be removed in 8.0
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices(".ml-anomalies*")
-                            .privileges("read", "view_index_metadata")
-                            .build(),
-
-                        // Annotations
-                        RoleDescriptor.IndicesPrivileges.builder()
-                            .indices("observability-annotations")
-                            .privileges("read", "view_index_metadata")
-                            .build() },
-                    new RoleDescriptor.ApplicationResourcePrivileges[] {
-                        RoleDescriptor.ApplicationResourcePrivileges.builder()
-                            .application("kibana-*")
-                            .resources("*")
-                            .privileges("reserved_ml_apm_user")
-                            .build() },
-                    null,
-                    null,
-                    MetadataUtils.getDeprecatedReservedMetadata(
-                        "This role will be removed in a future major release. Please use editor and viewer roles instead"
-                    ),
-                    null,
-                    null,
-                    null,
-                    null,
-                    "Grants the privileges required for APM users (such as read and view_index_metadata privileges "
-                        + "on the apm-* and .ml-anomalies* indices)."
-                )
-            ),
             entry(
                 "inference_admin",
                 new RoleDescriptor(
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java
index 05f4e560b73c1..f160b704e9e12 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java
@@ -580,14 +580,16 @@ private void addIndexLifecyclePoliciesIfMissing(ClusterState state) {
             logger.trace("running in data stream lifecycle only mode. skipping the installation of ILM policies.");
             return;
         }
-        IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE);
+        final IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE);
+        final Map<String, LifecyclePolicy> policies = metadata != null ? metadata.getPolicies() : Map.of();
+
         for (LifecyclePolicy policy : getLifecyclePolicies()) {
             final AtomicBoolean creationCheck = policyCreationsInProgress.computeIfAbsent(
                 policy.getName(),
                 key -> new AtomicBoolean(false)
             );
             if (creationCheck.compareAndSet(false, true)) {
-                final LifecyclePolicy currentPolicy = metadata != null ? metadata.getPolicies().get(policy.getName()) : null;
+                final LifecyclePolicy currentPolicy = policies.get(policy.getName());
                 if (Objects.isNull(currentPolicy)) {
                     logger.debug("adding lifecycle policy [{}] for [{}], because it doesn't exist", policy.getName(), getOrigin());
                     putPolicy(policy, creationCheck);
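`addIndexLifecyclePoliciesIfMissing` guards each policy installation with a per-name `AtomicBoolean` obtained via `computeIfAbsent` and claimed with `compareAndSet`, so concurrent cluster-state updates trigger at most one in-flight PUT per policy. The same idiom in isolation:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;

public class CreationCheckSketch {
    private final Map<String, AtomicBoolean> inProgress = new ConcurrentHashMap<>();

    boolean tryStart(String name) {
        AtomicBoolean flag = inProgress.computeIfAbsent(name, k -> new AtomicBoolean(false));
        return flag.compareAndSet(false, true); // true => this caller owns the installation
    }

    void finish(String name) {
        inProgress.get(name).set(false); // release on success or failure
    }

    public static void main(String[] args) {
        CreationCheckSketch check = new CreationCheckSketch();
        System.out.println(check.tryStart("my-policy")); // true
        System.out.println(check.tryStart("my-policy")); // false -- already in flight
        check.finish("my-policy");
        System.out.println(check.tryStart("my-policy")); // true again
    }
}
```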
featureService; private volatile boolean enabled; public YamlTemplateRegistry( @@ -61,10 +54,9 @@ public YamlTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - this(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService, ignored -> true); + this(nodeSettings, clusterService, threadPool, client, xContentRegistry, ignored -> true); } @SuppressWarnings({ "unchecked", "this-escape" }) @@ -74,7 +66,6 @@ public YamlTemplateRegistry( ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry, - FeatureService featureService, Predicate templateFilter ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); @@ -123,7 +114,6 @@ public YamlTemplateRegistry( .filter(templateFilter) .map(this::loadLifecyclePolicy) .collect(Collectors.toList()); - this.featureService = featureService; } catch (IOException e) { throw new ElasticsearchException(e); } @@ -152,13 +142,6 @@ public void close() { clusterService.removeListener(this); } - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to the use of the feature in all the `@lifecycle` component templates - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } - @Override protected boolean requiresMasterNode() { return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java index 47f7fea8dc199..6e78c2e8d3ef3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java @@ -184,7 +184,7 @@ public void checkForDeprecations(String id, NamedXContentRegistry namedXContentR onDeprecation.accept( // max_page_search_size got deprecated in 7.8, still accepted for 8.x, to be removed in 9.x new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 04fe20901749b..e889d25cd7a96 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -14,6 +14,7 @@ import java.util.Arrays; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -228,6 +229,50 @@ public void testLastUsedMomentaryFeature() { assertThat(lastUsed.get(usage), equalTo(200L)); } + public void testLastUsedMomentaryFeatureWithSameNameDifferentFamily() { + LicensedFeature.Momentary featureFamilyA = LicensedFeature.momentary("familyA", "goldFeature", GOLD); + LicensedFeature.Momentary featureFamilyB = LicensedFeature.momentary("familyB", 
"goldFeature", GOLD); + + AtomicInteger currentTime = new AtomicInteger(100); // non zero start time + XPackLicenseState licenseState = new XPackLicenseState(currentTime::get); + + featureFamilyA.check(licenseState); + featureFamilyB.check(licenseState); + + Map lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + Set actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 100L) + ) + ); + + currentTime.set(200); + featureFamilyB.check(licenseState); + + lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 200L) + ) + ); + } + + private record FeatureInfoWithTimestamp(String family, String featureName, Long timestamp) {} + public void testLastUsedPersistentFeature() { LicensedFeature.Persistent goldFeature = LicensedFeature.persistent("family", "goldFeature", GOLD); AtomicInteger currentTime = new AtomicInteger(100); // non zero start time diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.01-")); - assertThat(generateSnapshotName("", resolverContext), 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("<name-{now}>", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("<name-{now}>", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("<name-{now}>", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("<name-{now}>", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("<name-{now/M}>", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("<name-{now/M}>", time), startsWith("name-2019.03.01-")); - assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", resolverContext), startsWith("name-2019-03-15.21:09:00-")); + assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", time), startsWith("name-2019-03-15.21:09:00-")); } public void testNameValidation() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java index 21e3155501995..01a12fb795316 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java @@ -189,10 +189,7 @@ public void testNoShardStats() { shardStats[0] = sStats; mockXPackInfo(true, true); - mockIndexStatsCall( - indexName, - new IndexStats(indexName, "uuid", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, null, null, shardStats) - ); + mockIndexStatsCall(indexName, new IndexStats(indexName, "uuid", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, shardStats)); final SetOnce<Boolean> conditionMetHolder = new SetOnce<>(); final SetOnce<ToXContentObject> stepInfoHolder = new SetOnce<>(); @@ -292,7 +289,7 @@ private IndexStats randomIndexStats(boolean isLeaderIndex, int numOfShards) { for (int i = 0; i < numOfShards; i++) { shardStats[i] = randomShardStats(isLeaderIndex); } - return new IndexStats(randomAlphaOfLength(5), randomAlphaOfLength(10), null, null, null, null, shardStats); + return new IndexStats(randomAlphaOfLength(5), randomAlphaOfLength(10), null, null, shardStats); } private ShardStats randomShardStats(boolean isLeaderIndex) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java index cf4630899ab53..a91cceec8a167 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.InferenceConfigItemTestCase; @@ -67,13 +65,4 @@ protected TextExpansionConfig doParseInstance(XContentParser parser) throws IOEx protected TextExpansionConfig mutateInstanceForVersion(TextExpansionConfig instance, TransportVersion version) { return instance; } - - public void testBertTokenizationOnly() { - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> new TextExpansionConfig(null, RobertaTokenizationTests.createRandom(), null) - ); - assertEquals(RestStatus.BAD_REQUEST, e.status()); - assertEquals("text expansion models must be configured with BERT tokenizer, [roberta] given", e.getMessage()); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index 218876c7d40e8..3ca6777512420 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -31,9 +31,12 @@ import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.restriction.Workflow; +import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver; import org.hamcrest.Matchers; import java.io.IOException; @@ -47,7 +50,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; @@ -1338,38 +1340,191 @@ public void testIsEmpty() { } } - public void testHasPrivilegesOtherThanIndex() { + public void testHasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() { + // any index and some cluster privileges are allowed assertThat( new RoleDescriptor( "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), // all of these are allowed + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(false) + ); + // any index and some cluster privileges are allowed + assertThat( + new RoleDescriptor( + "name", + new String[] { "manage_security" }, // unlikely we will ever support allowing manage security across clusters + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // application privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + new ApplicationResourcePrivileges[] { + ApplicationResourcePrivileges.builder().application("app").privileges("foo").resources("res").build() }, + null, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // configurable cluster privileges are not allowed + assertThat( + 
new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) }, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // run as is not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + new String[] { "foo" }, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // workflows restriction is not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, null, - randomBoolean() ? null : randomIndicesPrivileges(1, 5), null, null, null, null, null, null, + new RoleDescriptor.Restriction(WorkflowResolver.allWorkflows().stream().map(Workflow::name).toArray(String[]::new)), + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + // remote indices privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + new RoleDescriptor.RemoteIndicesPrivileges[] { + RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() }, null, null, null ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + // remote cluster privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + new RemoteClusterPermissions().addGroup( + new RemoteClusterPermissionGroup( + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new String[] { "rmt" } + ) + ), + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // metadata, transient metadata and description are allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + Collections.singletonMap("foo", "bar"), + Collections.singletonMap("foo", "bar"), + null, + null, + null, + "description" + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), is(false) ); - final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder() - .allowReservedMetadata(true) - 
.allowRemoteIndices(true) - .allowRestriction(true) - .allowDescription(true) - .allowRemoteClusters(true) - .build(); - final boolean expected = roleDescriptor.hasClusterPrivileges() - || roleDescriptor.hasConfigurableClusterPrivileges() - || roleDescriptor.hasApplicationPrivileges() - || roleDescriptor.hasRunAs() - || roleDescriptor.hasRemoteIndicesPrivileges() - || roleDescriptor.hasWorkflowsRestriction(); - assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } private static void resetFieldPermssionsCache() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index fb4d822b7655c..17579fd6368ce 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -1106,6 +1106,28 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); }); + // index for Security Solution workflow insights + Arrays.asList(".edr-workflow-insights-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); + // Data telemetry reads mappings, metadata and stats of indices Arrays.asList(randomAlphaOfLengthBetween(8, 24), "packetbeat-*").forEach((index) -> { logger.info("index name [{}]", index); @@ -3058,89 +3080,6 @@ public void testAPMSystemRole() { assertNoAccessAllowed(APMSystemRole, XPackPlugin.ASYNC_RESULTS_INDEX + 
randomAlphaOfLengthBetween(0, 2)); } - public void testAPMUserRole() { - final TransportRequest request = mock(TransportRequest.class); - final Authentication authentication = AuthenticationTestHelper.builder().build(); - - final RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("apm_user"); - assertNotNull(roleDescriptor); - assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - - final String allowedApplicationActionPattern = "example/custom/action/*"; - final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); - Role role = Role.buildFromRoleDescriptor( - roleDescriptor, - new FieldPermissionsCache(Settings.EMPTY), - RESTRICTED_INDICES, - List.of( - new ApplicationPrivilegeDescriptor( - kibanaApplicationWithRandomIndex, - "reserved_ml_apm_user", - Set.of(allowedApplicationActionPattern), - Map.of() - ) - ) - ); - - assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); - assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); - - assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, "foo-apm"); - assertNoAccessAllowed(role, "foo-logs-apm.bar"); - assertNoAccessAllowed(role, "foo-logs-apm-bar"); - assertNoAccessAllowed(role, "foo-traces-apm.bar"); - assertNoAccessAllowed(role, "foo-traces-apm-bar"); - assertNoAccessAllowed(role, "foo-metrics-apm.bar"); - assertNoAccessAllowed(role, "foo-metrics-apm-bar"); - - assertOnlyReadAllowed(role, "logs-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "logs-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm." 
+ randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); - - assertOnlyReadAllowed(role, "observability-annotations"); - - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - kibanaApplicationWithRandomIndex, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(true) - ); - - final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(otherApplication, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - otherApplication, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(false) - ); - } - public void testMachineLearningAdminRole() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = AuthenticationTestHelper.builder().build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java index bbf80279b0b2a..60db8b6522518 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java @@ -218,7 +218,7 @@ public void testThatDelegateTrustManagerIsRespected() throws Exception { if (cert.endsWith("/ca")) { assertTrusted(trustManager, cert); } else { - assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to find certificate chain." : "PKIX path building failed.*"); + assertNotValid(trustManager, cert, inFipsJvm() ? 
"Unable to construct a valid chain" : "PKIX path building failed.*"); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java index e396712cbc360..356fac4539137 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java @@ -726,7 +726,7 @@ private static void assertPutPipelineAction( putRequest.getSource(), putRequest.getXContentType() ); - List processors = (List) pipelineConfiguration.getConfigAsMap().get("processors"); + List processors = (List) pipelineConfiguration.getConfig().get("processors"); assertThat(processors, hasSize(1)); Map setProcessor = (Map) ((Map) processors.get(0)).get("set"); assertNotNull(setProcessor.get("field")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index f1c2de11496bf..8cfecc432c661 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -930,7 +930,7 @@ public void testCheckForDeprecations() { equalTo( Collections.singletonList( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -952,7 +952,7 @@ public void testCheckForDeprecations() { equalTo( List.of( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -982,7 +982,7 @@ public void testCheckForDeprecations() { null ), new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index 8f50ebd334f16..f90d2202db0d3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -9,6 +9,7 @@ "sort": { "field": [ "profiling.project.id", + "k8s.namespace.name", "orchestrator.resource.name", "host.name", "container.name", @@ -80,6 +81,9 @@ }, "container.id": { "type": "keyword" + }, + "k8s.namespace.name": { + "type": "keyword" } } } diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle index a9580f4e14d6b..7e61533c818ec 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ 
b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 9a8b228763fe0..45b543d910a75 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index c4f2a239d48e2..236c851febd6c 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-yaml-rest-test' diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index c5cfbea000ebe..54e07558464d1 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' diff --git a/x-pack/plugin/downsample/qa/with-security/build.gradle b/x-pack/plugin/downsample/qa/with-security/build.gradle index 849c242f372bd..29980b95d0291 100644 --- a/x-pack/plugin/downsample/qa/with-security/build.gradle +++ b/x-pack/plugin/downsample/qa/with-security/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' diff --git a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle index 2e649e718b081..6a1f820e36205 100644 --- a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle +++ b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('core')) diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle index 844cfcc61adff..17a213a6e7f0d 100644 --- a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle +++ b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('core')) diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle index 637ab21a98fd7..cf3c687004cbb 100644 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ b/x-pack/plugin/enrich/qa/rest/build.gradle @@ -1,10 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams restResources { restApi { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java index 7cddd7e037742..512955a5fe2eb 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java @@ -56,7 +56,7 @@ static boolean exists(ClusterState clusterState) { if (ingestMetadata != null) { final PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(pipelineName()); if (pipeline != null) { - Object version = pipeline.getConfigAsMap().get("version"); + Object version = pipeline.getConfig().get("version"); return version instanceof Number number && number.intValue() >= ENRICH_PIPELINE_LAST_UPDATED_VERSION; } } diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle index 47a1ffaa37fa4..1e1973a118074 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle @@ -5,7 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index a354ca4b4b31c..df1c76ccf770f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -456,7 +456,6 @@ public Collection createComponents(PluginServices services) { // Behavioral analytics components final AnalyticsTemplateRegistry analyticsTemplateRegistry = new AnalyticsTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() @@ -466,7 +465,6 @@ public Collection createComponents(PluginServices services) { // Connector components final ConnectorTemplateRegistry connectorTemplateRegistry = new ConnectorTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java index aa200f7ae9acb..214f9150dfcc5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java @@ -32,7 +32,7 @@ protected final BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest r // We need to consume parameters and content from the REST request in order to bypass unrecognized param errors // and return a license error. 
request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); return channel -> channel.sendResponse( new RestResponse(channel, LicenseUtils.newComplianceException(this.licenseState, this.product)) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java index 86882a28ec39f..ba121f2cf865e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java @@ -7,15 +7,11 @@ package org.elasticsearch.xpack.application; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.application.analytics.AnalyticsTemplateRegistry; -import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.rules.action.ListQueryRulesetsAction; import org.elasticsearch.xpack.application.rules.retriever.QueryRuleRetrieverBuilder; -import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction.QUERY_RULES_TEST_API; @@ -30,14 +26,4 @@ public Set getFeatures() { ListQueryRulesetsAction.QUERY_RULE_LIST_TYPES ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATES_FEATURE, - Version.V_8_10_0, - AnalyticsTemplateRegistry.ANALYTICS_TEMPLATE_FEATURE, - Version.V_8_12_0 - ); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java index d9f433b8052bf..99a239dd617a2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.application.analytics; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -36,8 +33,6 @@ public class AnalyticsTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature ANALYTICS_TEMPLATE_FEATURE = new NodeFeature("behavioral_analytics.templates"); - // This number must be incremented when we make changes to built-in templates. 
static final int REGISTRY_VERSION = 3; @@ -100,17 +95,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public AnalyticsTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -138,9 +129,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), ANALYTICS_TEMPLATE_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index 41976bc6b4272..9b8cc7cfdbe4f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -33,8 +30,6 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature CONNECTOR_TEMPLATES_FEATURE = new NodeFeature("elastic-connectors.templates"); - // This number must be incremented when we make changes to built-in templates. 
static final int REGISTRY_VERSION = 3; @@ -153,17 +148,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public ConnectorTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -186,9 +177,4 @@ protected boolean requiresMasterNode() { // Necessary to prevent conflicts in some mixed-cluster environments with pre-7.7 nodes return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), CONNECTOR_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java index 681b14e8be61c..1099603e9be07 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java @@ -61,7 +61,7 @@ public List routes() { }; FakeRestRequest fakeRestRequest = new FakeRestRequest(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, isLicensed ? 0 : 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), isLicensed ? 0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java index 50102b8cfcf53..fb2fb11c7460f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.analytics; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -42,7 +40,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -78,13 +75,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = 
ClusterServiceUtils.createClusterService(threadPool); - registry = new AnalyticsTemplateRegistry( - clusterService, - new FeatureService(List.of(new EnterpriseSearchFeatures())), - threadPool, - client, - NamedXContentRegistry.EMPTY - ); + registry = new AnalyticsTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -282,25 +273,6 @@ public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_7_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.8.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index 3fbc5cd749cb2..a4c7015afafcb 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.connector; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -41,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -81,8 +78,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - FeatureService featureService = new FeatureService(List.of(new EnterpriseSearchFeatures())); - registry = new ConnectorTemplateRegistry(clusterService, featureService, threadPool, client, NamedXContentRegistry.EMPTY); + registry = new ConnectorTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -310,25 +306,6 @@ public void 
testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_9_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.10.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index b0b5fefa37fcd..9ae67f0e27c2b 100644 --- a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle index cbea0896264d5..bc1a44f94d18a 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle @@ -5,9 +5,7 @@ * 2.0. */ - import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index a791356499f5c..7ca6e8f134d20 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.internal-testclusters' import org.elasticsearch.gradle.testclusters.RunTask -import org.elasticsearch.gradle.internal.info.BuildParams dependencies { javaRestTestImplementation project(':test:framework') diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle index d3aa227c7ef88..bbeb439ab6155 100644 --- a/x-pack/plugin/eql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.bwc-test' import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask dependencies { diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index 00f196d863f2e..0ffecefb934f7 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -1,10 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' -import org.elasticsearch.gradle.internal.info.BuildParams - restResources { restApi { include '_common', 'bulk', 'indices', 'eql' diff --git a/x-pack/plugin/eql/qa/security/build.gradle b/x-pack/plugin/eql/qa/security/build.gradle index 1f0f949cab706..9072a9a7bad3e 100644 --- a/x-pack/plugin/eql/qa/security/build.gradle +++ b/x-pack/plugin/eql/qa/security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.internal-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('eql:qa:common')) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 45f42a754910d..53debedafc3d8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -34,11 +33,6 @@ public abstract class Attribute extends NamedExpression { */ protected static final String SYNTHETIC_ATTRIBUTE_NAME_PREFIX = "$$"; - public static List getNamedWriteables() { - // TODO add UnsupportedAttribute when these are moved to the same project - return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); - } - // can the attr be null - typically used in JOINs private final Nullability nullability; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java index 0be7f65d767c7..00765a8c0528c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; @@ -15,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -29,14 +27,6 @@ * (which is a type of expression) with a single child, c. */ public abstract class Expression extends Node implements Resolvable { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : NamedExpression.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Literal.ENTRY); - return entries; - } public static class TypeResolution { private final boolean failed; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java new file mode 100644 index 0000000000000..174a0321a3057 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionCoreWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> expressions() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + // add entries as expressions + for (NamedWriteableRegistry.Entry e : namedExpressions()) { + entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (Expression) e.reader.read(in))); + } + entries.add(Literal.ENTRY); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> namedExpressions() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + // add entries as named writeables + for (NamedWriteableRegistry.Entry e : attributes()) { + entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); + } + entries.add(Alias.ENTRY); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> attributes() { + return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); + } +}
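Note: ExpressionCoreWritables layers its entries so that every Attribute entry is re-wrapped and the same wire name resolves whether deserialization asks for an Expression, a NamedExpression, or an Attribute. A plausible wiring sketch; the registry construction below is for illustration only, the actual registration site is not part of this hunk:

import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xpack.esql.core.expression.ExpressionCoreWritables;

class WritablesWiringSketch {
    static NamedWriteableRegistry buildRegistry() {
        // One registry covers Expression, NamedExpression and Attribute reads,
        // because the helper emits one entry per (target class, wire name) pair.
        return new NamedWriteableRegistry(ExpressionCoreWritables.getNamedWriteables());
    }
}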
*/ public abstract class NamedExpression extends Expression implements NamedWriteable { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : Attribute.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Alias.ENTRY); - return entries; - } private final String name; private final NameId id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java index cad5c631088f2..a1afcdbf1f77c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.function; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -42,6 +43,11 @@ public Nullability nullable() { return Expressions.nullable(children()); } + /** Return true if this function can be executed under the provided {@link XPackLicenseState}, otherwise false. */ + public boolean checkLicense(XPackLicenseState state) { + return true; + } + @Override public int hashCode() { return Objects.hash(getClass(), children()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java index 29a567e83211d..b23593804f8fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -23,10 +22,6 @@ public abstract class FullTextPredicate extends Expression { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, StringQueryPredicate.ENTRY); - } - public enum Operator { AND, OR; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java deleted file mode 100644 index 95000a5364e12..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static java.util.Collections.emptyList; - -public final class StringQueryPredicate extends FullTextPredicate { - - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "StringQueryPredicate", - StringQueryPredicate::new - ); - - private final Map<String, Float> fields; - - public StringQueryPredicate(Source source, String query, String options) { - super(source, query, options, emptyList()); - - // inferred - this.fields = FullTextUtils.parseFields(optionMap(), source); - } - - StringQueryPredicate(StreamInput in) throws IOException { - super(in); - assert super.children().isEmpty(); - this.fields = FullTextUtils.parseFields(optionMap(), source()); - } - - @Override - protected NodeInfo<StringQueryPredicate> info() { - return NodeInfo.create(this, StringQueryPredicate::new, query(), options()); - } - - @Override - public Expression replaceChildren(List<Expression> newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - public Map<String, Float> fields() { - return fields; - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index b6383fac33299..7836522c77130 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; @@ -26,7 +25,6 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -73,18 +71,6 @@ private static Query translateField(RegexMatch e, String targetFieldName) { } } - public static class StringQueries extends ExpressionTranslator<StringQueryPredicate> { - - @Override - protected Query asQuery(StringQueryPredicate q, TranslatorHandler handler) { - return
doTranslate(q, handler); - } - - public static Query doTranslate(StringQueryPredicate q, TranslatorHandler handler) { - return new QueryStringQuery(q.source(), q.query(), q.fields(), q); - } - } - public static class MultiMatches extends ExpressionTranslator<MultiMatchQueryPredicate> { @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index d84a471815a9a..61b480968e974 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -12,7 +12,6 @@ import org.elasticsearch.plugins.Plugin; public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { - public static final FeatureFlag DATE_NANOS_FEATURE_FLAG = new FeatureFlag("esql_date_nanos"); public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java index 8ac90e6314174..8dcb87749ae48 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java @@ -14,7 +14,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.Collections; @@ -55,20 +54,13 @@ public class QueryStringQuery extends Query { private final String query; private final Map<String, Float> fields; - private StringQueryPredicate predicate; private final Map<String, String> options; - // dedicated constructor for QueryTranslator - public QueryStringQuery(Source source, String query, String fieldName) { - this(source, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null); - } - - public QueryStringQuery(Source source, String query, Map<String, Float> fields, StringQueryPredicate predicate) { + public QueryStringQuery(Source source, String query, Map<String, Float> fields, Map<String, String> options) { super(source); this.query = query; this.fields = fields; - this.predicate = predicate; - this.options = predicate == null ? Collections.emptyMap() : predicate.optionMap(); + this.options = options == null ?
Collections.emptyMap() : options; } @@ -95,7 +87,7 @@ public String query() { @Override public int hashCode() { - return Objects.hash(query, fields, predicate); + return Objects.hash(query, fields); } @Override @@ -109,7 +101,7 @@ public boolean equals(Object obj) { } QueryStringQuery other = (QueryStringQuery) obj; - return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(predicate, other.predicate); + return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(options, other.options); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 347e6b43099fc..1c65dd386667f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -209,7 +209,6 @@ public enum DataType { * check that sending them to a function produces a sane error message. */ public static final Map<DataType, FeatureFlag> UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) ); @@ -591,6 +590,13 @@ public DataType noText() { return isString(this) ? KEYWORD : this; } + public boolean isDate() { + return switch (this) { + case DATETIME, DATE_NANOS -> true; + default -> false; + }; + } + /** * Named parameters with default values. It's just easier to do this with * a builder in java.... diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java index 0f80011961092..22e7b93e84ce1 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java @@ -10,42 +10,40 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; public class QueryStringQueryTests extends ESTestCase { public void testQueryBuilding() { - QueryStringQueryBuilder qb = getBuilder("lenient=true"); + QueryStringQueryBuilder qb = getBuilder(Map.of("lenient", "true")); assertThat(qb.lenient(), equalTo(true)); - qb = getBuilder("lenient=true;default_operator=AND"); + qb = getBuilder(Map.of("lenient", "true", "default_operator", "AND")); assertThat(qb.lenient(), equalTo(true)); assertThat(qb.defaultOperator(), equalTo(Operator.AND)); - Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy")); + Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy"))); assertThat(e.getMessage(), equalTo("illegal query_string option [pizza]")); - e = expectThrows(ElasticsearchParseException.class, () -> getBuilder("type=aoeu")); + e =
expectThrows(ElasticsearchParseException.class, () -> getBuilder(Map.of("type", "aoeu"))); assertThat(e.getMessage(), equalTo("failed to parse [multi_match] query type [aoeu]. unknown type.")); } - private static QueryStringQueryBuilder getBuilder(String options) { + private static QueryStringQueryBuilder getBuilder(Map<String, String> options) { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", options); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); - return (QueryStringQueryBuilder) mmq.asBuilder(); + final QueryStringQuery query = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), options); + return (QueryStringQueryBuilder) query.asBuilder(); } public void testToString() { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", ""); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); + final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), Map.of()); assertEquals("QueryStringQuery@1:2[{foo=1.0}:eggplant]", mmq.toString()); } } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 6541fcd84afef..f92c895cc5b7b 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + plugins { id 'idea' } -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider; import static org.elasticsearch.gradle.util.PlatformUtils.normalize @@ -83,7 +89,6 @@ tasks.named("test").configure { } } File functionsFolder = file("build/testrun/test/temp/esql/functions") - File signatureFolder = file("build/testrun/test/temp/esql/functions/signature") File typesFolder = file("build/testrun/test/temp/esql/functions/types") def functionsDocFolder = file("${rootDir}/docs/reference/esql/functions") def effectiveProjectDir = projectDir diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 3deac4925c951..609c778df5929 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -608,6 +608,27 @@ tasks.named('stringTemplates').configure { it.outputFile = "org/elasticsearch/compute/aggregation/RateDoubleAggregator.java" } + File stdDevAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevLongAggregator.java" + } + template { + it.properties = floatProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java" + } File topAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st") template { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java new file mode 100644 index 0000000000000..3a1185d34fa23 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for double. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevDoubleAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, double value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, double value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java new file mode 100644 index 0000000000000..51c22e7e29c1e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for float. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevFloatAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, float value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, float value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java new file mode 100644 index 0000000000000..24eae35cb3249 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for int. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevIntAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, int value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, int value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java new file mode 100644 index 0000000000000..888ace30a0c8e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for long. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevLongAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, long value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, long value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..dd6cc89401a99 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java @@ -0,0 +1,178 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. 
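All four template instantiations share the same (mean, m2, count) intermediate state, which is the Welford representation of a running variance: m2 accumulates squared deviations from the running mean, so the final standard deviation is sqrt(m2 / count). The StdDevStates class backing these methods is not part of this diff, so the sketch below only illustrates the textbook update and merge steps implied by the combine and combineIntermediate signatures above; the class and method names are illustrative:

// Illustrative only: the Welford update and the Chan et al. merge that the
// (mean, m2, count) intermediate state implies. Not the actual StdDevStates code.
final class WelfordSketch {
    double mean;
    double m2;
    long count;

    // Per-value update; numerically stabler than summing x and x^2 separately.
    void add(double value) {
        count++;
        double delta = value - mean;
        mean += delta / count;
        m2 += delta * (value - mean); // second factor uses the updated mean
    }

    // Merge another partial state; this is what combineIntermediate boils down to.
    void combine(double otherMean, double otherM2, long otherCount) {
        if (otherCount == 0) {
            return;
        }
        long total = count + otherCount;
        double delta = otherMean - mean;
        m2 += otherM2 + delta * delta * ((double) count * otherCount) / total;
        mean = (mean * count + otherMean * otherCount) / total;
        count = total;
    }

    // Population standard deviation of everything seen so far.
    double stdDev() {
        return count == 0 ? 0.0 : Math.sqrt(m2 / count);
    }
}

Because the merge is exact, partial states can be combined in any order, which is what makes the grouped and distributed execution paths in the generated classes below safe.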
+ */ +public final class StdDevDoubleAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List<Integer> channels; + + public StdDevDoubleAggregatorFunction(DriverContext driverContext, List<Integer> channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevDoubleAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new StdDevDoubleAggregatorFunction(driverContext, channels, StdDevDoubleAggregator.initSingle()); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if
(countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevDoubleAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..313eed4ae97ae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List<Integer> channels; + + public StdDevDoubleAggregatorFunctionSupplier(List<Integer> channels) { + this.channels = channels; + } + + @Override + public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..da49c254e353a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0.
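The supplier is the factory the compute engine goes through to pick between the ungrouped and grouped execution paths. A hypothetical wiring, assuming channel 0 carries the input column and a DriverContext is supplied by the surrounding operator:

import java.util.List;
import org.elasticsearch.compute.aggregation.AggregatorFunction;
import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
import org.elasticsearch.compute.aggregation.StdDevDoubleAggregatorFunctionSupplier;
import org.elasticsearch.compute.operator.DriverContext;

// Illustrative only: instantiating the two execution modes from one supplier.
final class StdDevWiringSketch {
    // e.g. STATS STD_DEV(x) -- one state for the whole input
    static AggregatorFunction ungrouped(DriverContext ctx) {
        return new StdDevDoubleAggregatorFunctionSupplier(List.of(0)).aggregator(ctx);
    }

    // e.g. STATS STD_DEV(x) BY key -- one state per group id
    static GroupingAggregatorFunction grouped(DriverContext ctx) {
        return new StdDevDoubleAggregatorFunctionSupplier(List.of(0)).groupingAggregator(ctx);
    }
}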
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List<Integer> channels; + + private final DriverContext driverContext; + + public StdDevDoubleGroupingAggregatorFunction(List<Integer> channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevDoubleGroupingAggregatorFunction create(List<Integer> channels, + DriverContext driverContext) { + return new StdDevDoubleGroupingAggregatorFunction(channels, StdDevDoubleAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v <
valuesEnd; v++) { + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevDoubleAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + 
StdDevDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java new file mode 100644 index 0000000000000..bf8c4854f6b93 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java @@ -0,0 +1,180 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevFloatAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List<Integer> channels; + + public StdDevFloatAggregatorFunction(DriverContext driverContext, List<Integer> channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevFloatAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new StdDevFloatAggregatorFunction(driverContext, channels, StdDevFloatAggregator.initSingle()); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(FloatVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevFloatAggregator.combine(state, vector.getFloat(i)); + } + } + + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevFloatAggregator.combine(state, vector.getFloat(i)); + } + } + + private void addRawBlock(FloatBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { +
return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevFloatAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevFloatAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..25dfa54895eda --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List<Integer> channels; + + public StdDevFloatAggregatorFunctionSupplier(List<Integer> channels) { + this.channels = channels; + } + + @Override + public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevFloatAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevFloatGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of floats"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bf994aaf2840e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0.
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List<Integer> channels; + + private final DriverContext driverContext; + + public StdDevFloatGroupingAggregatorFunction(List<Integer> channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevFloatGroupingAggregatorFunction create(List<Integer> channels, + DriverContext driverContext) { + return new StdDevFloatGroupingAggregatorFunction(channels, StdDevFloatAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + FloatBlock valuesBlock = page.getBlock(channels.get(0)); + FloatVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, FloatBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart
+ values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevFloatAggregator.combine(state, groupId, values.getFloat(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, FloatVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, FloatBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevFloatAggregator.combine(state, groupId, values.getFloat(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, FloatVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevFloatAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevFloatGroupingAggregatorFunction) input).state; + 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevFloatAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevFloatAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java new file mode 100644 index 0000000000000..4a5585a7dd454 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java @@ -0,0 +1,180 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. 
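+ * It folds raw input into a single (mean, m2, count) state, honoring the
+ * position mask passed to {@code addRawInput}.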
+ */ +public final class StdDevIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevIntAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevIntAggregatorFunction(driverContext, channels, StdDevIntAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevIntAggregator.combine(state, block.getInt(i)); + } + } + } + + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = 
((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevIntAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..5a762d6606a25 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevIntAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevIntAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..139cc24d3541f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
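+// Instances of the function above and the grouping function below are created
+// through StdDevIntAggregatorFunctionSupplier. A minimal usage sketch — the
+// channel list and driverContext here are illustrative, not part of this change:
+//
+//   var supplier = new StdDevIntAggregatorFunctionSupplier(List.of(0));
+//   var perRow   = supplier.aggregator(driverContext);         // single-state version
+//   var grouped  = supplier.groupingAggregator(driverContext); // per-group version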
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevIntGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevIntGroupingAggregatorFunction(channels, StdDevIntAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
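+        // A multi-valued position contributes every one of its values to the same group's running state.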
StdDevIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevIntAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + 
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java new file mode 100644 index 0000000000000..b5ed31116a90c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java @@ -0,0 +1,178 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevLongAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevLongAggregatorFunction(driverContext, channels, StdDevLongAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevLongAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + 
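+    // In the non-grouping case each intermediate block carries exactly one position; the surrounding asserts verify this.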
LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevLongAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..09b996201ef16 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevLongAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevLongAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..da7a5f4bdea0d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
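+// Worked example of the (mean, m2, count) intermediate triple: the values {1, 2, 3}
+// produce mean = 2, m2 = (1-2)^2 + (2-2)^2 + (3-2)^2 = 2 and count = 3, so the final
+// population standard deviation is sqrt(m2 / count) = sqrt(2 / 3) ≈ 0.816, matching
+// WelfordAlgorithm#evaluate further down in this change.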
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevLongGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevLongGroupingAggregatorFunction(channels, StdDevLongAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
StdDevLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevLongAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevLongAggregator.combineStates(state, groupId, inState, position); + } + + 
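+    // combineStates above relies on the standard parallel merge rule for running
+    // variance, implemented by WelfordAlgorithm#add(double, double, long):
+    //   delta = mean_a - mean_b
+    //   m2    = m2_a + m2_b + delta * delta * count_a * count_b / (count_a + count_b)
+    //   mean  = (mean_a * count_a + mean_b * count_b) / (count_a + count_b)
+    //   count = count_a + count_b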
@Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java new file mode 100644 index 0000000000000..bff8903fd3bec --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java @@ -0,0 +1,211 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +public final class StdDevStates { + + private StdDevStates() {} + + static final class SingleState implements AggregatorState { + + private final WelfordAlgorithm welfordAlgorithm; + + SingleState() { + this(0, 0, 0); + } + + SingleState(double mean, double m2, long count) { + this.welfordAlgorithm = new WelfordAlgorithm(mean, m2, count); + } + + public void add(long value) { + welfordAlgorithm.add(value); + } + + public void add(double value) { + welfordAlgorithm.add(value); + } + + public void add(int value) { + welfordAlgorithm.add(value); + } + + public void combine(double mean, double m2, long count) { + welfordAlgorithm.add(mean, m2, count); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 3; + BlockFactory blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantDoubleBlockWith(mean(), 1); + blocks[offset + 1] = blockFactory.newConstantDoubleBlockWith(m2(), 1); + blocks[offset + 2] = blockFactory.newConstantLongBlockWith(count(), 1); + } + + @Override + public void close() {} + + public double mean() { + return welfordAlgorithm.mean(); + } + + public double m2() { + return welfordAlgorithm.m2(); + } + + public long count() { + return welfordAlgorithm.count(); + } + + public double evaluateFinal() { + return welfordAlgorithm.evaluate(); + } + + public Block evaluateFinal(DriverContext driverContext) { + final long count = count(); + final double m2 = m2(); + if (count == 0 || Double.isFinite(m2) == false) { + return driverContext.blockFactory().newConstantNullBlock(1); + } + return driverContext.blockFactory().newConstantDoubleBlockWith(evaluateFinal(), 1); + } + } + + static final class GroupingState 
implements GroupingAggregatorState { + + private ObjectArray states; + private final BigArrays bigArrays; + + GroupingState(BigArrays bigArrays) { + this.states = bigArrays.newObjectArray(1); + this.bigArrays = bigArrays; + } + + WelfordAlgorithm getOrNull(int position) { + if (position < states.size()) { + return states.get(position); + } else { + return null; + } + } + + public void combine(int groupId, WelfordAlgorithm state) { + if (state == null) { + return; + } + combine(groupId, state.mean(), state.m2(), state.count()); + } + + public void combine(int groupId, double meanValue, double m2Value, long countValue) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new WelfordAlgorithm(meanValue, m2Value, countValue); + states.set(groupId, state); + } else { + state.add(meanValue, m2Value, countValue); + } + } + + public WelfordAlgorithm getOrSet(int groupId) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new WelfordAlgorithm(); + states.set(groupId, state); + } + return state; + } + + public void add(int groupId, long value) { + var state = getOrSet(groupId); + state.add(value); + } + + public void add(int groupId, double value) { + var state = getOrSet(groupId); + state.add(value); + } + + public void add(int groupId, int value) { + var state = getOrSet(groupId); + state.add(value); + } + + private void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + try ( + var meanBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var m2Builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var countBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state != null) { + meanBuilder.appendDouble(state.mean()); + m2Builder.appendDouble(state.m2()); + countBuilder.appendLong(state.count()); + } else { + meanBuilder.appendDouble(0.0); + m2Builder.appendDouble(0.0); + countBuilder.appendLong(0); + } + } + blocks[offset + 0] = meanBuilder.build(); + blocks[offset + 1] = m2Builder.build(); + blocks[offset + 2] = countBuilder.build(); + } + } + + public Block evaluateFinal(IntVector selected, DriverContext driverContext) { + try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + final var groupId = selected.getInt(i); + final var st = getOrNull(groupId); + if (st != null) { + final var m2 = st.m2(); + final var count = st.count(); + if (count == 0 || Double.isFinite(m2) == false) { + builder.appendNull(); + } else { + builder.appendDouble(st.evaluate()); + } + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(states); + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java new file mode 100644 index 0000000000000..8ccb985507247 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +/** + * Algorithm for calculating standard deviation, one value at a time. + * + * @see + * Welford's_online_algorithm and + * + * Parallel algorithm + */ +public final class WelfordAlgorithm { + private double mean; + private double m2; + private long count; + + public double mean() { + return mean; + } + + public double m2() { + return m2; + } + + public long count() { + return count; + } + + public WelfordAlgorithm() { + this(0, 0, 0); + } + + public WelfordAlgorithm(double mean, double m2, long count) { + this.mean = mean; + this.m2 = m2; + this.count = count; + } + + public void add(int value) { + add((double) value); + } + + public void add(long value) { + add((double) value); + } + + public void add(double value) { + final double delta = value - mean; + count += 1; + mean += delta / count; + m2 += delta * (value - mean); + } + + public void add(double meanValue, double m2Value, long countValue) { + if (countValue == 0) { + return; + } + if (count == 0) { + mean = meanValue; + m2 = m2Value; + count = countValue; + return; + } + double delta = mean - meanValue; + m2 += m2Value + delta * delta * count * countValue / (count + countValue); + mean = (mean * count + meanValue * countValue) / (count + countValue); + count += countValue; + } + + public double evaluate() { + return count < 2 ? 
0 : Math.sqrt(m2 / count); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st new file mode 100644 index 0000000000000..510d770f90d62 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for $type$. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDev$Type$Aggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, $type$ value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, $type$ value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 9e48bc13cdafa..1e06cf1ea4450 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; 
import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -18,8 +17,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; -import java.util.List; - /** * A Block is a columnar representation of homogenous data. It has a position (row) count, and * various data retrieval methods for accessing the underlying data that is stored at a given @@ -291,19 +288,6 @@ static Block[] buildAll(Block.Builder... builders) { } } - static List getNamedWriteables() { - return List.of( - IntBlock.ENTRY, - LongBlock.ENTRY, - FloatBlock.ENTRY, - DoubleBlock.ENTRY, - BytesRefBlock.ENTRY, - BooleanBlock.ENTRY, - ConstantNullBlock.ENTRY, - CompositeBlock.ENTRY - ); - } - /** * Serialization type for blocks: 0 and 1 replace false/true used in pre-8.14 */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java new file mode 100644 index 0000000000000..ff9139e57e52e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class BlockWritables { + + public static List getNamedWriteables() { + return List.of( + IntBlock.ENTRY, + LongBlock.ENTRY, + FloatBlock.ENTRY, + DoubleBlock.ENTRY, + BytesRefBlock.ENTRY, + BooleanBlock.ENTRY, + ConstantNullBlock.ENTRY, + CompositeBlock.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 9198de53b1e04..6dca94b9bc79a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -147,7 +147,7 @@ public void merge(int bucket, BytesRefBucketedSort other, int otherBucket) { // The value was never collected. return; } - other.checkInvariant(bucket); + other.checkInvariant(otherBucket); long otherStart = other.startIndex(otherBucket, otherRootIndex); long otherEnd = other.common.endIndex(otherRootIndex); // TODO: This can be improved for heapified buckets by making use of the heap structures diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index dcd13671670d8..d7d9da052a962 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -191,18 +191,29 @@ SegmentState segmentState(int segment) throws IOException { private class SegmentState { private final Weight weight; private final LeafReaderContext ctx; + /** * Lazily initialed {@link Scorer} for this. 
{@code null} here means uninitialized * or that {@link #noMatch} is true. */ private Scorer scorer; + /** + * Thread that initialized the {@link #scorer}. + */ + private Thread scorerThread; + /** * Lazily initialed {@link BulkScorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private BulkScorer bulkScorer; + /** + * Thread that initialized the {@link #bulkScorer}. + */ + private Thread bulkScorerThread; + /** * Set to {@code true} if, in the process of building a {@link Scorer} or {@link BulkScorer}, * the {@link Weight} tells us there aren't any matches. @@ -223,7 +234,10 @@ BooleanVector scoreDense(int min, int max) throws IOException { if (noMatch) { return blockFactory.newConstantBooleanVector(false, length); } - if (bulkScorer == null) { + if (bulkScorer == null || // The bulkScorer wasn't initialized + Thread.currentThread() != bulkScorerThread // The bulkScorer was initialized on a different thread + ) { + bulkScorerThread = Thread.currentThread(); bulkScorer = weight.bulkScorer(ctx); if (bulkScorer == null) { noMatch = true; @@ -257,8 +271,11 @@ private void initScorer(int minDocId) throws IOException { if (noMatch) { return; } - if (scorer == null || scorer.iterator().docID() > minDocId) { - // The previous block might have been beyond this one, reset the scorer and try again. + if (scorer == null || // Scorer not initialized + scorerThread != Thread.currentThread() || // Scorer initialized on a different thread + scorer.iterator().docID() > minDocId // The previous block came "after" this one + ) { + scorerThread = Thread.currentThread(); scorer = weight.scorer(ctx); if (scorer == null) { noMatch = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java deleted file mode 100644 index 4b4379eb6a4d8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.data.BlockFactory; - -import java.util.List; -import java.util.Objects; - -import static java.util.stream.Collectors.joining; - -public class RowOperator extends LocalSourceOperator { - - private final List objects; - - public record RowOperatorFactory(List objects) implements SourceOperatorFactory { - - @Override - public SourceOperator get(DriverContext driverContext) { - return new RowOperator(driverContext.blockFactory(), objects); - } - - @Override - public String describe() { - return "RowOperator[objects = " + objects.stream().map(Objects::toString).collect(joining(",")) + "]"; - } - } - - public RowOperator(BlockFactory blockFactory, List objects) { - super(blockFactory, () -> objects); - this.objects = objects; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(this.getClass().getSimpleName()).append("["); - sb.append("objects=").append(objects); - sb.append("]"); - return sb.toString(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 06059944f1310..e6bae7ba385e6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -40,6 +40,7 @@ import java.io.IOException; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; @@ -339,6 +340,10 @@ public boolean isEmpty() { return sinks.isEmpty(); } + public Set sinkKeys() { + return sinks.keySet(); + } + @Override protected void doStart() { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index e3fc0e26e34e0..4baaf9ad89bd6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -220,20 +220,21 @@ void onSinkComplete() { * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ public void addRemoteSink(RemoteSink remoteSink, int instances) { - for (int i = 0; i < instances; i++) { - var fetcher = new RemoteSinkFetcher(remoteSink); - fetchExecutor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - fetcher.onSinkFailed(e); - } + fetchExecutor.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + failure.unwrapAndCollect(e); + buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading + } - @Override - protected void doRun() { + @Override + protected void doRun() { + for (int i = 0; i < instances; i++) { + var fetcher = new RemoteSinkFetcher(remoteSink); fetcher.fetchPage(); } - }); - } + } + }); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java index 2093094fb8af5..0cd34d2ad4066 
100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -38,17 +39,25 @@ public final class EnrichQuerySourceOperator extends SourceOperator { private int queryPosition = -1; private final IndexReader indexReader; private final IndexSearcher searcher; + private final Warnings warnings; private final int maxPageSize; // using smaller pages enables quick cancellation and reduces sorting costs public static final int DEFAULT_MAX_PAGE_SIZE = 256; - public EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) { + public EnrichQuerySourceOperator( + BlockFactory blockFactory, + int maxPageSize, + QueryList queryList, + IndexReader indexReader, + Warnings warnings + ) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.queryList = queryList; this.indexReader = indexReader; this.searcher = new IndexSearcher(indexReader); + this.warnings = warnings; } @Override @@ -73,12 +82,18 @@ public Page getOutput() { } int totalMatches = 0; do { - Query query = nextQuery(); - if (query == null) { - assert isFinished(); - break; + Query query; + try { + query = nextQuery(); + if (query == null) { + assert isFinished(); + break; + } + query = searcher.rewrite(new ConstantScoreQuery(query)); + } catch (Exception e) { + warnings.registerException(e); + continue; } - query = searcher.rewrite(new ConstantScoreQuery(query)); final var weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f); if (weight == null) { continue; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index e72c34fdb5f7a..d76e58d1c8a30 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -29,7 +29,7 @@ public abstract class SerializationTestCase extends ESTestCase { BigArrays bigArrays; protected BlockFactory blockFactory; - NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); + NamedWriteableRegistry registry = new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); @Before public final void newBlockFactory() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java index f857f50b2d30f..339c2bba2a734 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java @@ -363,6 +363,52 @@ public final void testMergeEmptyToEmpty() { } } + public final void testMergeOtherBigger() { + try (T sort = build(SortOrder.ASC, 3)) { + var values = threeSortedValues(); + + collect(sort, 
values.get(0), 0); + collect(sort, values.get(1), 0); + collect(sort, values.get(2), 0); + + try (T other = build(SortOrder.ASC, 3)) { + collect(other, values.get(0), 0); + collect(other, values.get(1), 1); + collect(other, values.get(2), 2); + + merge(sort, 0, other, 0); + merge(sort, 0, other, 1); + merge(sort, 0, other, 2); + } + + assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1))); + } + } + + public final void testMergeThisBigger() { + try (T sort = build(SortOrder.ASC, 3)) { + var values = threeSortedValues(); + + collect(sort, values.get(0), 0); + collect(sort, values.get(1), 1); + collect(sort, values.get(2), 2); + + try (T other = build(SortOrder.ASC, 3)) { + collect(other, values.get(0), 0); + collect(other, values.get(1), 0); + collect(other, values.get(2), 0); + + merge(sort, 0, other, 0); + merge(sort, 1, other, 0); + merge(sort, 2, other, 0); + } + + assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1))); + assertBlock(sort, 1, List.of(values.get(0), values.get(1), values.get(1))); + assertBlock(sort, 2, values); + } + } + protected void assertBlock(T sort, int groupId, List<V> values) { var blockFactory = TestBlockFactory.getNonBreakingInstance(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java deleted file mode 100644 index cd8a49939fbb5..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class RowOperatorTests extends ESTestCase { - final DriverContext driverContext = new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - TestBlockFactory.getNonBreakingInstance() - ); - - public void testBoolean() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(false)); - assertThat(factory.describe(), equalTo("RowOperator[objects = false]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[false]]")); - BooleanBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBoolean(0), equalTo(false)); - } - - public void testInt() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(213)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 213]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[213]]")); - IntBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getInt(0), equalTo(213)); - } - - public void testLong() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(21321343214L)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 21321343214]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[21321343214]]")); - LongBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getLong(0), equalTo(21321343214L)); - } - - public void testDouble() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(2.0)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 2.0]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[2.0]]")); - DoubleBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getDouble(0), equalTo(2.0)); - } - - public void testString() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(new BytesRef("cat"))); - assertThat(factory.describe(), equalTo("RowOperator[objects = [63 61 74]]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[[63 61 74]]]")); - BytesRefBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBytesRef(0, new BytesRef()), equalTo(new BytesRef("cat"))); - } - - public void testNull() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(Arrays.asList(new Object[] { null })); - assertThat(factory.describe(), equalTo("RowOperator[objects = null]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[null]]")); - Block block 
= factory.get(driverContext).getOutput().getBlock(0); - assertTrue(block.isNull(0)); - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 9e07f9c8f5faf..0b1ecce8c375b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; @@ -457,7 +457,7 @@ public void sendResponse(TransportResponse transportResponse) { private MockTransportService newTransportService() { List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>(ClusterModule.getNamedWriteables()); - namedWriteables.addAll(Block.getNamedWriteables()); + namedWriteables.addAll(BlockWritables.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index 6daace76dd8b8..2af52b6bab5a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -32,6 +32,8 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -120,7 +122,8 @@ public void testQueries() throws Exception { // 3 -> [] -> [] // 4 -> [a1] -> [3] // 5 -> [] -> [] - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader); + var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader, warnings); Page p0 = queryOperator.getOutput(); assertNotNull(p0); assertThat(p0.getPositionCount(), equalTo(6)); @@ -187,7 +190,8 @@ public void testRandomMatchQueries() throws Exception { MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); var queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); int maxPageSize = between(1, 256); - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader); +
var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader, warnings); Map<Integer, Set<Integer>> actualPositions = new HashMap<>(); while (queryOperator.isFinished() == false) { Page page = queryOperator.getOutput(); diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 68c0e8e30f814..eac5d5764d4b2 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -1,8 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle index 2c432eb94ebf1..7f3859e2229ef 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 60eecbb7658b7..5df85d1004dd1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -47,6 +47,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,6 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS",
testCase.requiredCapabilities.contains(JOIN_LOOKUP.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 6ebf05755ef5e..265d9f7bd8cd5 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -51,7 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; -import java.util.stream.Stream; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; @@ -207,10 +206,7 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te } } - var features = Stream.concat( - new EsqlFeatures().getFeatures().stream(), - new EsqlFeatures().getHistoricalFeatures().keySet().stream() - ).map(NodeFeature::id).collect(Collectors.toSet()); + var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); for (String feature : testCase.requiredCapabilities) { var esqlFeature = "esql." + feature; diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index d124fdb5755c3..813354db697e1 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.BlockLoader; @@ -27,6 +28,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.hamcrest.Matcher; import org.junit.Before; @@ -1106,6 +1108,323 @@ public void testTypeConflictInObject() throws IOException { ); } + /** + * Test for https://github.com/elastic/elasticsearch/issues/117054 fix + */ + public void testOneNestedSubField_AndSameNameSupportedField() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + + Map<String, Object> result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) +
).entry("values", Collections.EMPTY_LIST) + ); + + index("test", """ + {"Responses.process.pid": 123,"process.parent.command_line":"run.bat"}"""); + + result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:18: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: Unknown column [Responses.process.pid]")); + } + + public void testOneNestedSubField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 123}"""); + index("test2", """ + {"process.parent.command_line":"run.bat"}"""); + + Map result = runEsql("FROM test* | SORT process.parent.command_line ASC NULLS FIRST"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test* | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:19: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test* + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: 
Unknown column [Responses.process.pid]")); + } + + public void testOneNestedField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "integer", + "fields": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 111,"process.parent.command_line":"run1.bat"}"""); + index("test2", """ + {"Responses.process": 222,"process.parent.command_line":"run2.bat"}"""); + + Map<String, Object> result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* | where Responses.process.pid == 111 + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of()) + ); + + result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* + | SORT process.parent.command_line + | WHERE Responses.process IS NULL + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat"))) + ); + } + + private void assumeIndexResolverNestedFieldsNameClashFixed() throws IOException { + // especially for BWC tests but also for regular tests + var capsName =
EsqlCapabilities.Cap.FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER.name().toLowerCase(Locale.ROOT); + boolean requiredClusterCapability = clusterHasCapability("POST", "/_query", List.of(), List.of(capsName)).orElse(false); + assumeTrue( + "This test makes sense for versions that have the fix for https://github.com/elastic/elasticsearch/issues/117054", + requiredClusterCapability + ); + } + private CheckedConsumer<XContentBuilder, IOException> empNoInObject(String empNoType) { return index -> { index.startObject("properties"); @@ -1456,16 +1775,12 @@ private static void index(String name, String... docs) throws IOException { } private static void createIndex(String name, CheckedConsumer<XContentBuilder, IOException> mapping) throws IOException { - Request request = new Request("PUT", "/" + name); XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); - index.startObject("mappings"); mapping.accept(index); index.endObject(); - index.endObject(); String configStr = Strings.toString(index); logger.info("index: {} {}", name, configStr); - request.setJsonEntity(configStr); - client().performRequest(request); + ESRestTestCase.createIndex(name, Settings.EMPTY, configStr); } /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 478c68db68aa7..0d6659ad37a27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -93,6 +93,8 @@ public class CsvTestsDataLoader { private static final TestsDataset BOOKS = new TestsDataset("books"); private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); + private static final String LOOKUP_INDEX_SUFFIX = "_lookup"; + public static final Map<String, TestsDataset> CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), Map.entry(HOSTS.indexName, HOSTS), @@ -128,7 +130,9 @@ public class CsvTestsDataLoader { Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), Map.entry(BOOKS.indexName, BOOKS), - Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT), + // JOIN LOOKUP alias + Map.entry(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX, LANGUAGES.withIndex(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX)) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 2913401d8aab3..d6715a932c075 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -342,7 +343,7 @@ public String toString() { public static final Configuration TEST_CFG = configuration(new
QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index adbf24cee10b0..1e23cf62917fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,6 @@ avg(salary):double | always_false:boolean in -required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index 3be3decaf351c..7bbf011176693 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -760,3 +760,19 @@ c:long |b:date 3 |2025-10-01T00:00:00.000Z 4 |2023-11-01T00:00:00.000Z ; + +bucketWithFilteredCountRefingBucket +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| STATS c = COUNT(*) WHERE b > "1953-01-01T00:00:00.000Z" AND emp_no > 10020 BY b = BUCKET(birth_date, 1 year) +| SORT c, b +| LIMIT 4 +; + +c:long |b:date +0 |1952-01-01T00:00:00.000Z +0 |1953-01-01T00:00:00.000Z +0 |null +1 |1965-01-01T00:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 7e7c561fac3a5..734e2ef5e475e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,6 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warningRegex:Line 1:58: evaluation of \[to_datetime\(ul\)\] failed, treating result as null. Only first 20 failures recorded. 
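The bucketWithFilteredCountRefingBucket spec above, like the per_agg_filtering specs later in this patch, exercises per-aggregate WHERE clauses: each aggregate consumes only the rows that pass its own predicate, all within one shared pass over the input. A minimal Java sketch of those semantics; the class name, variable names, and data below are invented for illustration and are not taken from the ES|QL implementation:

import java.util.List;

public class PerAggregateFilterSketch {
    public static void main(String[] args) {
        // Toy stand-in for "FROM employees | EVAL Ks = salary / 1000".
        List<Integer> ks = List.of(25, 38, 45, 52, 61, 70);

        // Mirrors STATS under_40K = COUNT(*) WHERE Ks < 40,
        //               inbetween = COUNT(*) WHERE 40 <= Ks AND Ks < 60,
        //               over_60K  = COUNT(*) WHERE 60 <= Ks,
        //               total     = COUNT(*)
        // Every aggregate applies its own filter during the same pass.
        long under40 = 0, inbetween = 0, over60 = 0, total = 0;
        for (int k : ks) {
            if (k < 40) under40++;
            if (40 <= k && k < 60) inbetween++;
            if (60 <= k) over60++;
            total++;
        }
        System.out.println(under40 + " " + inbetween + " " + over60 + " " + total); // 2 2 2 6
    }
}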
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 3c38bd190b0b1..25b114b5d1daf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -580,7 +580,6 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_capability: enrich_load -required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 537b69547c6be..3505b52e5599e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -99,7 +99,6 @@ int:integer |dbl:double ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -115,7 +114,6 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -131,7 +129,6 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -143,7 +140,6 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. @@ -156,7 +152,6 @@ emp_no:integer |salary_change:double ; inMultivalue -required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -169,7 +164,6 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -185,7 +179,6 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change > 1.*\] failed, treating result as null. Only first 20 failures recorded. 
@@ -201,7 +194,6 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change == 1.19.*\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index b399734151412..f4b6d41a7a027 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,6 @@ // Integral types-specific tests inLongAndInt -required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warningRegex:evaluation of \[avg_worked_seconds in \(372957040, salary_change.long, 236703986\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ long:long |ul:ul ; convertDoubleToUL -required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. @@ -127,7 +125,6 @@ int:integer |long:long ; convertULToLong -required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +167,6 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +186,6 @@ int:integer |ii:integer ; convertLongToInt -required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +202,6 @@ long:long |int:integer ; convertULToInt -required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +233,6 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warningRegex:Line 1:79: evaluation of \[to_integer\(str1\)\] failed, treating result as null. Only first 20 failures recorded. 
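The convert_warn removals in the surrounding float and integer specs all concern conversions such as to_long(1e19) whose input cannot be represented in the target type: ES|QL yields null for that row and records a warning rather than wrapping or throwing. A rough sketch of that range check in Java; the helper name and warning text are invented for illustration and are not the actual converter code:

import java.util.ArrayList;
import java.util.List;

public class OverflowToNullSketch {
    // Invented helper mirroring the "failed, treating result as null" behaviour
    // for to_long(double) when the value falls outside the long range.
    static Long toLongOrNull(double d, List<String> warnings) {
        if (Double.isNaN(d) || d < Long.MIN_VALUE || d > Long.MAX_VALUE) {
            warnings.add("evaluation of [to_long(" + d + ")] failed, treating result as null");
            return null;
        }
        return (long) d;
    }

    public static void main(String[] args) {
        List<String> warnings = new ArrayList<>();
        System.out.println(toLongOrNull(123.4, warnings)); // 123
        System.out.println(toLongOrNull(1e19, warnings));  // null: 1e19 exceeds Long.MAX_VALUE (about 9.22e18)
        warnings.forEach(System.out::println);
    }
}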
@@ -254,7 +247,6 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +257,6 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +272,6 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +287,6 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int == 0\] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +301,6 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int\] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +313,6 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int in \(1, 7\)\] failed, treating result as null. Only first 20 failures recorded. @@ -338,7 +325,6 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -354,7 +340,6 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -370,7 +355,6 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int == 1.*\] failed, treating result as null. 
Only first 20 failures recorded diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 0fb6994ef759f..4418f7e0aa7ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 == ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +59,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 < ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +71,6 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 != ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -125,7 +122,6 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +142,6 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +163,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +182,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warningRegex:evaluation of \[cidr_match\(ip1, \\\"127.0.0.2/32\\\"\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -199,7 +192,6 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +205,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +224,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +236,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -366,7 +355,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -380,7 +368,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec new file mode 100644 index 0000000000000..605bf78c20a32 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -0,0 +1,48 @@ +// +// CSV spec for LOOKUP JOIN command +// Reuses the sample dataset and commands from enrich.csv-spec +// + +basicOnTheDataNode +required_capability: join_lookup + +//TODO: this returns different results in CI than locally +// sometimes null, sometimes Spanish (likely related to the execution order) +FROM employees +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no < 500 +| KEEP emp_no, language_name +| SORT emp_no +| LIMIT 1 +; + +emp_no:integer | language_name:keyword +//10091 | Spanish +; + +basicRow-Ignore +required_capability: join_lookup + +ROW language_code = 1 +| LOOKUP JOIN languages_lookup ON language_code +; + +language_code:keyword | language_name:keyword +1 | English +; + +basicOnTheCoordinator +required_capability: join_lookup + +FROM employees +| SORT emp_no +| LIMIT 1 +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| KEEP emp_no, language_name +; + +emp_no:integer | language_name:keyword +10001 | French +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored similarity index 80% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored index 9cf96f7c0b6de..685e3ab2778e1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored @@ -4,7 +4,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -19,7 +19,7 @@ emp_no:integer | languages:integer | lang_name:keyword keywordByMvIntAndQuotedSource required_capability: lookup_v4 ROW
int=[1, 2, 3] -| LOOKUP "int_number_names" ON int +| LOOKUP_🐔 "int_number_names" ON int ; int:integer | name:keyword @@ -29,7 +29,7 @@ int:integer | name:keyword keywordByDupeIntAndTripleQuotedSource required_capability: lookup_v4 ROW int=[1, 1, 1] -| LOOKUP """int_number_names""" ON int +| LOOKUP_🐔 """int_number_names""" ON int ; int:integer | name:keyword @@ -39,10 +39,10 @@ int:integer | name:keyword intByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP int_number_names ON name +| LOOKUP_🐔 int_number_names ON name ; -name:keyword | int:integer +name:keyword | int:integer two | 2 ; @@ -53,7 +53,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages.long AS long -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -68,7 +68,7 @@ emp_no:integer | languages:long | lang_name:keyword longByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP long_number_names ON name +| LOOKUP_🐔 long_number_names ON name ; name:keyword | long:long @@ -81,7 +81,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME height AS double -| LOOKUP double_number_names ON double +| LOOKUP_🐔 double_number_names ON double | RENAME double AS height, name AS height_name | KEEP emp_no, height, height_name ; @@ -96,7 +96,7 @@ emp_no:integer | height:double | height_name:keyword floatByKeyword required_capability: lookup_v4 ROW name="two point zero eight" -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:keyword | double:double @@ -106,7 +106,7 @@ two point zero eight | 2.08 floatByNullMissing required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:null | double:double @@ -116,7 +116,7 @@ name:null | double:double floatByNullMatching required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names_with_null ON name +| LOOKUP_🐔 double_number_names_with_null ON name ; name:null | double:double @@ -126,7 +126,7 @@ name:null | double:double intIntByKeywordKeyword required_capability: lookup_v4 ROW aa="foo", ab="zoo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -136,7 +136,7 @@ foo | zoo | 1 | -1 intIntByKeywordKeywordMissing required_capability: lookup_v4 ROW aa="foo", ab="zoi" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -146,7 +146,7 @@ foo | zoi | null | null intIntByKeywordKeywordSameValues required_capability: lookup_v4 ROW aa="foo", ab="foo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -156,7 +156,7 @@ foo | foo | 2 | -2 intIntByKeywordKeywordSameValuesMissing required_capability: lookup_v4 ROW aa="bar", ab="bar" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -168,7 +168,7 @@ lookupBeforeStats-Ignore required_capability: lookup_v4 FROM employees | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | STATS height=ROUND(AVG(height), 3) BY languages | SORT height ASC; @@ -178,7 +178,7 @@ height:double | languages:keyword 1.732 | one 1.762 | two 1.764 | three - 1.809 | null + 1.809 | null 1.847 | five ; @@ -186,14 +186,14 @@ lookupAfterStats required_capability: lookup_v4 FROM employees | STATS int=TO_INT(AVG(height)) -| LOOKUP int_number_names ON int +| LOOKUP_🐔 
int_number_names ON int | KEEP name; name:keyword two ; -// Makes sure the LOOKUP squashes previous names +// Makes sure the LOOKUP_🐔 squashes previous names doesNotDuplicateNames required_capability: lookup_v4 FROM employees @@ -201,7 +201,7 @@ FROM employees | LIMIT 4 | RENAME languages.long AS long | EVAL name = CONCAT(first_name, " ", last_name) -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages | KEEP emp_no, languages, name ; @@ -219,7 +219,7 @@ required_capability: lookup_v4 FROM employees | WHERE emp_no < 10005 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC @@ -238,7 +238,7 @@ FROM employees | WHERE emp_no < 10005 | SORT languages ASC, emp_no ASC | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no ; @@ -256,7 +256,7 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", int = emp_no - 10000, name = "name", right = "right" -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; emp_no:integer | left:keyword | int:integer | right:keyword | name:keyword @@ -269,65 +269,57 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", nb = -10011+emp_no, na = "na", middle = "middle", ab = "ab", aa = "bar", right = "right" -| LOOKUP big ON aa, nb +| LOOKUP_🐔 big ON aa, nb ; -emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer +emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer 10001 | left | -10 | middle | bar | right | zop | 10 ; // -// Make sure that the new LOOKUP syntax doesn't clash with any existing things -// named "lookup" +// Make sure that the new LOOKUP_🐔 syntax doesn't clash with any existing things +// named "lookup_🐔" // -rowNamedLookup -required_capability: lookup_v4 -ROW lookup = "a" -; - -lookup:keyword - a -; rowNamedLOOKUP required_capability: lookup_v4 -ROW LOOKUP = "a" +ROW lookup_🐔 = "a" ; -LOOKUP:keyword +lookup_🐔:keyword a ; evalNamedLookup required_capability: lookup_v4 -ROW a = "a" | EVAL lookup = CONCAT(a, "1") +ROW a = "a" | EVAL lookup_🐔 = CONCAT(a, "1") ; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword a | a1 ; dissectNamedLookup required_capability: lookup_v4 -row a = "foo bar" | dissect a "foo %{lookup}"; +row a = "foo bar" | dissect a "foo %{lookup_🐔}"; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword foo bar | bar ; renameIntoLookup required_capability: lookup_v4 -row a = "foo bar" | RENAME a AS lookup; +row a = "foo bar" | RENAME a AS lookup_🐔; -lookup:keyword +lookup_🐔:keyword foo bar ; sortOnLookup required_capability: lookup_v4 -ROW lookup = "a" | SORT lookup +ROW lookup_🐔 = "a" | SORT lookup_🐔 ; -lookup:keyword +lookup_🐔:keyword a ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index da069836504d4..2fe2feb3bc219 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -214,8 +214,6 @@ height:double | s:double ; powSalarySquared -required_capability: pow_double - from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; 
salary:integer | s:double @@ -631,8 +629,6 @@ base:double | exponent:integer | result:double ; powIntInt -required_capability: pow_double - ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) ; @@ -642,8 +638,6 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_capability: pow_double - row s = 1 + pow(2, 2); s:double @@ -658,8 +652,6 @@ s:double ; powIntUL -required_capability: pow_double - row x = pow(1, 9223372036854775808); x:double @@ -667,8 +659,6 @@ x:double ; powLongUL -required_capability: pow_double - row x = to_long(1) | eval x = pow(x, 9223372036854775808); x:double @@ -676,8 +666,6 @@ x:double ; powUnsignedLongUL -required_capability: pow_double - row x = to_ul(1) | eval x = pow(x, 9223372036854775808); x:double @@ -701,8 +689,6 @@ null ; powULInt -required_capability: pow_double - row x = pow(to_unsigned_long(9223372036854775807), 1); x:double @@ -710,8 +696,6 @@ x:double ; powULIntOverrun -required_capability: pow_double - ROW x = POW(9223372036854775808, 2) ; @@ -732,8 +716,6 @@ x:double ; powULLong -required_capability: pow_double - row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); x:double @@ -741,8 +723,6 @@ x:double ; powULLongOverrun -required_capability: pow_double - row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); x:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 01e7258e8a6ee..ac9948c90f5e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,6 @@ ############################################### convertFromStringQuantize -required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index ad9de4674f8e1..5562028a5935f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2348,6 +2348,42 @@ v:integer | job_positions:keyword 10094 | Accountant ; +docsStatsWithSimpleFiltering +required_capability: per_agg_filtering +// tag::aggFiltering[] +FROM employees +| STATS avg50s = AVG(salary)::LONG WHERE birth_date < "1960-01-01", + avg60s = AVG(salary)::LONG WHERE birth_date >= "1960-01-01" + BY gender +| SORT gender +// end::aggFiltering[] +| WHERE gender IS NOT NULL +; + +// tag::aggFiltering-result[] +avg50s:long |avg60s:long |gender:keyword +55462 |46637 |F +48279 |44879 |M +// end::aggFiltering-result[] +; + +docsStatsWithFilteringNoGroups +required_capability: per_agg_filtering +// tag::aggFilteringNoGroup[] +FROM employees +| EVAL Ks = salary / 1000 // thousands +| STATS under_40K = COUNT(*) WHERE Ks < 40, + inbetween = COUNT(*) WHERE 40 <= Ks AND Ks < 60, + over_60K = COUNT(*) WHERE 60 <= Ks, + total = COUNT(*) +// end::aggFilteringNoGroup[] +; + +// tag::aggFilteringNoGroup-result[] +under_40K:long |inbetween:long |over_60K:long |total:long +36 |39 |25 |100 +// end::aggFilteringNoGroup-result[] +; statsWithFiltering required_capability: per_agg_filtering @@ -2468,6 +2504,7 @@ count:long |values:keyword |job_positions:keyword ; prunedStatsFollowedByStats +required_capability: per_agg_filtering from employees | eval my_length = length(concat(first_name, null)) | stats 
count = count(my_length) where false, @@ -2641,6 +2678,26 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |1 |5 |5 |21 ; +simpleCountOnFieldWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(emp_no) where emp_no < 10042 +; + +c1:long +41 +; + +simpleCountOnStarWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(*) where emp_no < 10042 +; + +c1:long +41 +; + commonFilterExtractionWithAliasing required_capability: per_agg_filtering from employees @@ -2846,3 +2903,143 @@ max:integer | job_positions:keyword 39878 | Business Analyst 67492 | Data Scientist ; + +stdDeviation +required_capability: std_dev +// tag::stdev[] +FROM employees +| STATS STD_DEV(height) +// end::stdev[] +; + +// tag::stdev-result[] +STD_DEV(height):double +0.20637044362020449 +// end::stdev-result[] +; + +stdDeviationNested +required_capability: std_dev +// tag::docsStatsStdDevNestedExpression[] +FROM employees +| STATS stddev_salary_change = STD_DEV(MV_MAX(salary_change)) +// end::docsStatsStdDevNestedExpression[] +; + +// tag::docsStatsStdDevNestedExpression-result[] +stddev_salary_change:double +6.875829592924112 +// end::docsStatsStdDevNestedExpression-result[] +; + + +stdDeviationWithLongs +required_capability: std_dev +FROM employees +| STATS STD_DEV(avg_worked_seconds) +; + +STD_DEV(avg_worked_seconds):double +5.76010425971634E7 +; + +stdDeviationWithInts +required_capability: std_dev +FROM employees +| STATS STD_DEV(salary) +; + +STD_DEV(salary):double +13765.12550278783 +; + +stdDeviationConstantValue +required_capability: std_dev +FROM employees +| WHERE languages == 2 +| STATS STD_DEV(languages) +; + +STD_DEV(languages):double +0.0 +; + +stdDeviationGroupedDoublesOnly +required_capability: std_dev +FROM employees +| STATS STD_DEV(height) BY languages +| SORT languages asc +; + +STD_DEV(height):double | languages:integer +0.22106409327010415 | 1 +0.22797190865484734 | 2 +0.18893070075713295 | 3 +0.14656141004227627 | 4 +0.17733860152780256 | 5 +0.2486543786061287 | null +; + +stdDeviationGroupedAllTypes +required_capability: std_dev +FROM employees +| WHERE languages < 3 +| STATS + double_std_dev = STD_DEV(height), + int_std_dev = STD_DEV(salary), + long_std_dev = STD_DEV(avg_worked_seconds) + BY languages +| SORT languages asc +; + +double_std_dev:double | int_std_dev:double | long_std_dev:double | languages:integer +0.22106409327010415 | 15166.244178730898 | 5.1998715922156096E7 | 1 +0.22797190865484734 | 12139.61099378116 | 5.309085506583288E7 | 2 +; + +stdDeviationNoRows +required_capability: std_dev +FROM employees +| WHERE languages IS null +| STATS STD_DEV(languages) +; + +STD_DEV(languages):double +null +; + +stdDevMultiValue +required_capability: std_dev +FROM employees +| STATS STD_DEV(salary_change) +; + +STD_DEV(salary_change):double +7.062226788733394 +; + +stdDevFilter +required_capability: std_dev +FROM employees +| STATS greater_than = STD_DEV(salary_change) WHERE languages > 3 +, less_than = STD_DEV(salary_change) WHERE languages <= 3 +, salary = STD_DEV(salary * 2) +, count = COUNT(*) BY gender +| SORT gender asc +; + +greater_than:double | less_than:double | salary:double | count:long | gender:keyword +6.4543266953142835 | 7.57786788789264 | 29045.770666969744 | 33 | F +6.975232333891946 | 6.604807075547775 | 26171.331109641273 | 57 | M +6.949207097931448 | 7.127229475750027 | 27921.220736207077 | 10 | null +; + +stdDevRow +required_capability: std_dev +ROW a = [1,2,3], b = 5 +| STATS STD_DEV(a), 
STD_DEV(b) +; + +STD_DEV(a):double | STD_DEV(b):double +0.816496580927726 | 0.0 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 963245f9f0ea6..e103168d2e589 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -390,7 +390,6 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -582,7 +581,6 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions < \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -595,7 +593,6 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[job_positions > \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -612,7 +609,6 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions == \\\"Accountant\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -624,7 +620,6 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions\] failed, treating result as null. Only first 20 failures recorded. @@ -637,7 +632,6 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions in \(\\\"Accountant\\\", \\"Tech Lead\\\"\)\] failed, treating result as null. Only first 20 failures recorded. @@ -650,7 +644,6 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions < \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -667,7 +660,6 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions > \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -680,7 +672,6 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions == \\\"Accountant\\\".*\] failed, treating result as null. Only first 20 failures recorded. 
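// Editor's note: with mv_warn now assumed on every version under test, the specs above rely
// solely on their warningRegex headers. A minimal hypothetical sketch of the convention (name,
// query, expected warnings, expected table), using ROW so it needs no index data; the warning
// wording is copied from the cases above, everything else is an assumption:
mvComparisonWarningSketch
ROW x = ["a", "b"] | WHERE x > "a";
warningRegex:evaluation of \[x > \\\"a\\\"\] failed, treating result as null. Only first 20 failures recorded.
warningRegex:java.lang.IllegalArgumentException: single-value function encountered multi-value

x:keyword
;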
@@ -937,7 +928,6 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -951,7 +941,6 @@ null | 19 ; startsWithText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -965,7 +954,6 @@ false | null ; substringOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -979,7 +967,6 @@ Gatew | null ; concatOfText -required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. @@ -1518,7 +1505,6 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index a2fd3f3d5e0da..af987b13acc82 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -718,6 +718,7 @@ count:long | @timestamp:date multiIndexTsNanosToDatetimeStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATETIME(@timestamp)) @@ -734,6 +735,8 @@ count:long | @timestamp:date Multi Index millis to nanos stats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos +required_capability: date_trunc_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATE_NANOS(@timestamp)) @@ -752,6 +755,7 @@ multiIndexTsLongStatsDrop required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -772,6 +776,7 @@ multiIndexTsLongStatsInline2 required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -915,6 +920,7 @@ multiIndexIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -956,6 +962,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM 
sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -998,6 +1005,7 @@ multiIndexIpStringTsLongRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1039,6 +1047,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1081,6 +1090,7 @@ multiIndexIpStringTsLongRenameToString required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_STRING(TO_DATETIME(@timestamp)), host_ip = TO_STRING(TO_IP(client_ip)) @@ -1123,6 +1133,7 @@ multiIndexWhereIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1139,6 +1150,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1155,6 +1167,7 @@ multiIndexWhereIpStringLikeTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1171,6 +1184,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringLikeTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" 
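// Editor's note: a spec is skipped unless the node advertises every required_capability it
// lists, which is why to_date_nanos is stacked onto these union-type tests. A hypothetical
// minimal case showing the gating convention; the nanosecond rendering in the expected row
// is an assumption, not taken from the suite:
toDateNanosGateSketch
required_capability: to_date_nanos
ROW ts = TO_DATE_NANOS("2023-10-23T12:15:03.360123456Z");

ts:date_nanos
2023-10-23T12:15:03.360123456Z
;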
@@ -1187,6 +1201,7 @@ multiIndexMultiColumnTypesRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1205,6 +1220,7 @@ multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1224,6 +1240,7 @@ multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1515,7 +1532,7 @@ FROM sample_data, sample_data_ts_long null | 172.21.0.5 | 1232382 | Disconnected | 8268153 ; -multiIndexIndirectUseOfUnionTypesInLookup +multiIndexIndirectUseOfUnionTypesInLookup-Ignore // TODO: `union_types` is required only because this makes the test skip in the csv tests; better solution: // make the csv tests work with multiple indices. required_capability: union_types @@ -1524,7 +1541,7 @@ FROM sample_data, sample_data_ts_long | SORT client_ip ASC | LIMIT 1 | EVAL int = (event_duration - 1232380)::integer -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; @timestamp:null | client_ip:ip | event_duration:long | message:keyword | int:integer | name:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 03d0b71894d9b..fbddb3d0e6989 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,6 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in >= to_ul\(74330435873664882\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownRange -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in .* to_ul\(.*\)\] failed, treating result as null. Only first 20 failures recorded. @@ -82,7 +80,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownIn -required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -96,7 +93,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsEquality -required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warningRegex:evaluation of \[bytes_in == bytes_out\] failed, treating result as null. Only first 20 failures recorded. 
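// Editor's note: the required_capability: pow_double lines removed earlier encode that POW
// now always returns a double, which is why these unsigned_long specs wrap arguments in
// to_ul(...) explicitly. A hypothetical sketch (not part of the suite) of that contract:
powULReturnsDoubleSketch
row x = pow(to_ul(10), 2);

x:double
100.0
;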
@@ -107,7 +103,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsInequality -required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warningRegex:evaluation of \[bytes_in < bytes_out\] failed, treating result as null. Only first 20 failures recorded. @@ -138,7 +133,6 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warningRegex:evaluation of \[bytes_in == to_ul\(154551962150890564\)\] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +143,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toDegrees -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -161,7 +154,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toRadians -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index df6a1e00b0212..c426e0f528eab 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -15,6 +16,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; @@ -27,8 +29,10 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.plugin.ComputeService; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; @@ -40,8 +44,10 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomPragmas; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -189,4 +195,44 @@ public void testCancel() throws Exception { 
Exception error = expectThrows(Exception.class, requestFuture::actionGet); assertThat(error.getMessage(), containsString("proxy timeout")); } + + public void testSameRemoteClusters() throws Exception { + TransportAddress address = cluster(REMOTE_CLUSTER).getInstance(TransportService.class).getLocalNode().getAddress(); + int moreClusters = between(1, 5); + for (int i = 0; i < moreClusters; i++) { + String clusterAlias = REMOTE_CLUSTER + "-" + i; + configureRemoteClusterWithSeedAddresses(clusterAlias, List.of(address)); + } + int numDocs = between(10, 100); + createRemoteIndex(numDocs); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); + request.pragmas(randomPragmas()); + ActionFuture<EsqlQueryResponse> future = client().execute(EsqlQueryAction.INSTANCE, request); + try { + try { + assertBusy(() -> { + List<TaskInfo> tasks = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(ComputeService.CLUSTER_ACTION_NAME) + .get() + .getTasks(); + assertThat(tasks, hasSize(moreClusters + 1)); + }); + } finally { + PauseFieldPlugin.allowEmitting.countDown(); + } + try (EsqlQueryResponse resp = future.actionGet(30, TimeUnit.SECONDS)) { + // TODO: This produces incorrect results because data on the remote cluster is processed multiple times. + long expectedCount = numDocs * (moreClusters + 1L); + assertThat(getValuesList(resp), equalTo(List.of(List.of(expectedCount)))); + } + } finally { + for (int i = 0; i < moreClusters; i++) { + String clusterAlias = REMOTE_CLUSTER + "-" + i; + removeRemoteCluster(clusterAlias); + } + } + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index cde4f10ef556c..460ab0f5b8b38 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -79,6 +79,11 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { private String REDUCE_DESCRIPTION; private boolean nodeLevelReduction; + /** + * Number of docs released by {@link #startEsql}. + */ + private int prereleasedDocs; + @Before public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); @@ -104,6 +109,7 @@ public void testTaskContents() throws Exception { ActionFuture<EsqlQueryResponse> response = startEsql(); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); List<TaskInfo> foundTasks = getTasksRunning(); int luceneSources = 0; @@ -216,9 +222,15 @@ private ActionFuture<EsqlQueryResponse> startEsql() { return startEsql("from test | stats sum(pause_me)"); } + /** + * Start an ESQL query, releasing a few docs from the {@code pause_me} + * script so it'll actually start but won't finish its first page. + */ private ActionFuture<EsqlQueryResponse> startEsql(String query) { scriptPermits.drainPermits(); - scriptPermits.release(between(1, 5)); + // Allow a few docs to calculate so the query gets "started" + prereleasedDocs = between(1, pageSize() / 2); + scriptPermits.release(prereleasedDocs); var settingsBuilder = Settings.builder() // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. 
.put("data_partitioning", "shard") @@ -401,7 +413,8 @@ protected void doRun() throws Exception { }); sessionId = foundTasks.get(0).taskId().toString(); assertTrue(fetchingStarted.await(1, TimeUnit.MINUTES)); - ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(sessionId); + String exchangeId = exchangeService.sinkKeys().stream().filter(s -> s.startsWith(sessionId)).findFirst().get(); + ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(exchangeId); waitedForPages = randomBoolean(); if (waitedForPages) { // do not fail exchange requests until we have some pages @@ -444,6 +457,7 @@ public void testTaskContentsForTopNQuery() throws Exception { ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally { @@ -455,7 +469,6 @@ public void testTaskContentsForTopNQuery() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107293") public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); READ_DESCRIPTION = """ @@ -475,7 +488,8 @@ public void testTaskContentsForLimitQuery() throws Exception { ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); - scriptPermits.release(pageSize()); + logger.info("unblocking script"); + scriptPermits.release(pageSize() - prereleasedDocs); getTasksRunning(); } finally { scriptPermits.release(numberOfDocs()); @@ -504,6 +518,7 @@ public void testTaskContentsForGroupingStatsQuery() throws Exception { ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java new file mode 100644 index 0000000000000..3a69983a0d86e --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Make sure the failures on the data node come back as failures over the wire. + */ +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class); + } + + /** + * Use a runtime field that fails when loading field values to fail the entire query. + */ + public void testFailureLoadingFields() throws IOException { + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("fail_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "fail").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get(); + + int docCount = 100; + List<IndexRequestBuilder> docs = new ArrayList<>(docCount); + for (int d = 0; d < docCount; d++) { + docs.add(client().prepareIndex("ok").setSource("foo", d)); + } + docs.add(client().prepareIndex("fail").setSource("foo", 0)); + indexRandom(true, docs); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> run("FROM fail,ok | LIMIT 100").close()); + assertThat(e.getMessage(), equalTo("test failure")); + } + + public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "fail"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new ElasticsearchException("test failure"); + } + }; + } + }; + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index cff9604053903..5c0c13b48df3b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -183,7 +183,8 @@ public void testLookupIndex() throws IOException { DataType.KEYWORD, "lookup", "data", - List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))) + List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))), + Source.EMPTY ); DriverContext driverContext = driverContext(); try ( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java index 1ce92ded8acc6..c52e1b538972b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java @@ -14,9 +14,13 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.MockSearchService; @@ -26,6 +30,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.plugin.ComputeService; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.hamcrest.Matchers; import org.junit.Before; @@ -56,6 +61,18 @@ protected Collection<Class<? extends Plugin>> getMockPlugins() { return plugins; } + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), InternalExchangePlugin.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(3000, 5000))) + .build(); + } + @Before public void setupIndices() { int numIndices = between(10, 20); @@ -113,32 +130,64 @@ public void testConcurrentQueries() throws Exception { } public void testRejection() throws Exception { - String[] nodes = internalCluster().getNodeNames(); - for (String node : nodes) { - MockTransportService ts = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - ts.addRequestHandlingBehavior(ExchangeService.EXCHANGE_ACTION_NAME, (handler, request, channel, task) -> { - handler.messageReceived(request, new TransportChannel() { - @Override - public String getProfileName() { - return channel.getProfileName(); - } - - @Override - public void sendResponse(TransportResponse response) { - channel.sendResponse(new 
RemoteTransportException("simulated", new EsRejectedExecutionException("test queue"))); - } - - @Override - public void sendResponse(Exception exception) { - channel.sendResponse(exception); - } - }, task); + DiscoveryNode dataNode = randomFrom(internalCluster().clusterService().state().nodes().getDataNodes().values()); + String indexName = "single-node-index"; + client().admin() + .indices() + .prepareCreate(indexName) + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.routing.allocation.require._name", dataNode.getName()) + ) + .setMapping("user", "type=keyword", "tags", "type=keyword") + .get(); + client().prepareIndex(indexName) + .setSource("user", "u1", "tags", "lucene") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + MockTransportService ts = (MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getName()); + CountDownLatch dataNodeRequestLatch = new CountDownLatch(1); + ts.addRequestHandlingBehavior(ComputeService.DATA_ACTION_NAME, (handler, request, channel, task) -> { + handler.messageReceived(request, channel, task); + dataNodeRequestLatch.countDown(); + }); + + ts.addRequestHandlingBehavior(ExchangeService.EXCHANGE_ACTION_NAME, (handler, request, channel, task) -> { + ts.getThreadPool().generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + channel.sendResponse(e); + } + + @Override + protected void doRun() throws Exception { + assertTrue(dataNodeRequestLatch.await(30, TimeUnit.SECONDS)); + handler.messageReceived(request, new TransportChannel() { + @Override + public String getProfileName() { + return channel.getProfileName(); + } + + @Override + public void sendResponse(TransportResponse response) { + channel.sendResponse(new RemoteTransportException("simulated", new EsRejectedExecutionException("test queue"))); + } + + @Override + public void sendResponse(Exception exception) { + channel.sendResponse(exception); + } + }, task); + } }); - } + }); + try { AtomicReference failure = new AtomicReference<>(); EsqlQueryRequest request = new EsqlQueryRequest(); - request.query("from test-* | stats count(user) by tags"); + request.query("from single-node-index | stats count(user) by tags"); request.acceptedPragmaRisks(true); request.pragmas(randomPragmas()); CountDownLatch queryLatch = new CountDownLatch(1); @@ -151,9 +200,7 @@ public void sendResponse(Exception exception) { assertThat(ExceptionsHelper.status(failure.get()), equalTo(RestStatus.TOO_MANY_REQUESTS)); assertThat(failure.get().getMessage(), equalTo("test queue")); } finally { - for (String node : nodes) { - ((MockTransportService) internalCluster().getInstance(TransportService.class, node)).clearAllRules(); - } + ts.clearAllRules(); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index b86c46fd3fa7a..3b647583f1129 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -14,9 +14,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import 
org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.junit.Before; import java.util.List; @@ -32,12 +29,6 @@ public void setupIndex() { createAndPopulateIndex(); } - @Override - protected EsqlQueryResponse run(EsqlQueryRequest request) { - assumeTrue("match operator capability not available", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - return super.run(request); - } - public void testSimpleWhereMatch() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 6ec93d203d984..ef875d7ca01d8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -85,8 +85,15 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // main section while preserving alphabetical order: // MYCOMMAND : 'mycommand' -> ... DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); -DEV_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(LOOKUP_MODE); +DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); +// list of all JOIN commands +DEV_JOIN : {this.isDevVersion()}? 'join' -> pushMode(JOIN_MODE); +DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); +DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); +DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); +DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); + // // Catch-all for unrecognized commands - don't define any beyond this line @@ -105,8 +112,6 @@ WS : [ \r\n\t]+ -> channel(HIDDEN) ; -COLON : ':'; - // // Expression - used by most command // @@ -177,6 +182,7 @@ AND : 'and'; ASC : 'asc'; ASSIGN : '='; CAST_OP : '::'; +COLON : ':'; COMMA : ','; DESC : 'desc'; DOT : '.'; @@ -209,7 +215,6 @@ MINUS : '-'; ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -EXPRESSION_COLON : {this.isDevVersion()}? 
COLON -> type(COLON); NESTED_WHERE : WHERE -> type(WHERE); @@ -543,6 +548,31 @@ LOOKUP_FIELD_WS : WS -> channel(HIDDEN) ; +// +// JOIN-related commands +// +mode JOIN_MODE; +JOIN_PIPE : PIPE -> type(PIPE), popMode; +JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN_AS : AS -> type(AS); +JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); +USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); + +JOIN_UNQUOTED_IDENTIFER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); +JOIN_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER); + +JOIN_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_WS + : WS -> channel(HIDDEN) + ; + // // METRICS command // diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 3dd1a2c754038..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 
+EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 +SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 +'USING'=119 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 67f194a1bff64..f84cfe3060503 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -54,6 +54,7 @@ processingCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand + | {this.isDevVersion()}? joinCommand ; whereCommand @@ -68,7 +69,7 @@ booleanExpression | left=booleanExpression operator=OR right=booleanExpression #logicalBinary | valueExpression (NOT)? IN LP valueExpression (COMMA valueExpression)* RP #logicalIn | valueExpression IS NOT? NULL #isNull - | {this.isDevVersion()}? matchBooleanExpression #matchExpression + | matchBooleanExpression #matchExpression ; regexBooleanExpression @@ -322,3 +323,19 @@ lookupCommand inlinestatsCommand : DEV_INLINESTATS stats=aggFields (BY grouping=fields)? ; + +joinCommand + : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + ; + +joinTarget + : index=identifier (AS alias=identifier)? 
+ ; + +joinCondition + : ON joinPredicate (COMMA joinPredicate)* + ; + +joinPredicate + : valueExpression + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 3dd1a2c754038..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 +EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 
+SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 +'USING'=119 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d2bee9c67af5b..d675f772b5a3b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -140,6 +140,12 @@ public enum Cap { */ CASE_MV, + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + ENRICH_LOAD, + /** * Optimization for ST_CENTROID changed some results in cartesian data. #108713 */ @@ -278,6 +284,11 @@ public enum Cap { */ RANGEQUERY_FOR_DATETIME, + /** + * Enforce strict type checking on ENRICH range types, and warnings for KEYWORD parsing at runtime. Done in #115091. + */ + ENRICH_STRICT_RANGE_TYPES, + /** * Fix for non-unique attribute names in ROW and logical plans. * https://github.com/elastic/elasticsearch/issues/110541 @@ -302,7 +313,7 @@ public enum Cap { /** * Support for match operator as a colon. Previous support for match operator as MATCH has been removed */ - MATCH_OPERATOR_COLON(Build.current().isSnapshot()), + MATCH_OPERATOR_COLON, /** * Removing support for the {@code META} keyword. 
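// Editor's note: each Cap constant below is advertised under its lower-cased name, which is
// exactly what the required_capability lines in the csv-spec files above reference; entries
// gated on Build.current().isSnapshot(), such as JOIN_LOOKUP, are skipped on release builds.
// A hypothetical spec gated on the new std_dev capability; the expected value is copied from
// the stdDevRow case earlier:
stdDevGateSketch
required_capability: std_dev
ROW a = [1, 2, 3] | STATS STD_DEV(a);

STD_DEV(a):double
0.816496580927726
;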
@@ -317,32 +328,32 @@ public enum Cap { /** * Support for nanosecond dates as a data type */ - DATE_NANOS_TYPE(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_TYPE(), /** * Support for to_date_nanos function */ - TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + TO_DATE_NANOS(), /** * Support for date nanos type in binary comparisons */ - DATE_NANOS_BINARY_COMPARISON(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_BINARY_COMPARISON(), /** * Support Least and Greatest functions on Date Nanos type */ - LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + LEAST_GREATEST_FOR_DATENANOS(), /** * Support for date_trunc function on date nanos type */ - DATE_TRUNC_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_TRUNC_DATE_NANOS(), /** * support aggregations on date nanos */ - DATE_NANOS_AGGREGATIONS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_AGGREGATIONS(), /** * Support for datetime in least and greatest functions @@ -420,6 +431,12 @@ public enum Cap { */ SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN, + /** + * Fix {@code SORT} when the {@code _source} field is not a sort key but + * is being returned. + */ + SORT_RETURNING_SOURCE_OK, + /** * Allow filter per individual aggregation. */ @@ -430,6 +447,11 @@ public enum Cap { */ PER_AGG_FILTERING_ORDS, + /** + * Support for {@code STD_DEV} aggregation. + */ + STD_DEV, + /** * Fix for https://github.com/elastic/elasticsearch/issues/114714 */ @@ -489,7 +511,17 @@ public enum Cap { /** * Support implicit casting from string literal to DATE_PERIOD or TIME_DURATION. */ - IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT; + IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT, + + /** + * LOOKUP JOIN + */ + JOIN_LOOKUP(Build.current().isSnapshot()), + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/117054 + */ + FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER; private final boolean enabled; @@ -535,9 +567,6 @@ public static Set<String> capabilities(boolean all) { for (NodeFeature feature : new EsqlFeatures().getFeatures()) { caps.add(cap(feature)); } - for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) { - caps.add(cap(feature)); - } return Set.copyOf(caps); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 562d42a94483f..7ad4c3d3e644d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.Column; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -75,6 +77,12 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Rename; 
import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -98,12 +106,14 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; @@ -189,6 +199,9 @@ private static class ResolveTable extends ParameterizedAnalyzerRule maybeResolveAttribute(ua, childrenOutput)); } @@ -588,6 +620,77 @@ private LogicalPlan resolveLookup(Lookup l, List<Attribute> childrenOutput) { return l; } + private Join resolveLookupJoin(LookupJoin join) { + JoinConfig config = join.config(); + // for now, support only (LEFT) USING clauses + JoinType type = config.type(); + // rewrite the join into an equi-join between the fields with the same name on the left and right + // per SQL standard, the USING columns are placed first in the output, followed by the rest of left, then right + if (type instanceof UsingJoinType using) { + List<Attribute> cols = using.columns(); + // the lookup cannot be resolved, bail out + if (Expressions.anyMatch(cols, c -> c instanceof UnresolvedAttribute ua && ua.customMessage())) { + return join; + } + + JoinType coreJoin = using.coreJoin(); + // verify the join type + if (coreJoin != JoinTypes.LEFT) { + String name = cols.get(0).name(); + UnresolvedAttribute errorAttribute = new UnresolvedAttribute( + join.source(), + name, + "Only LEFT join is supported with USING" + ); + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + // resolve the using columns against the left and the right side then assemble the new join config + List<Attribute> leftKeys = resolveUsingColumns(cols, join.left().output(), "left"); + List<Attribute> rightKeys = resolveUsingColumns(cols, join.right().output(), "right"); + List<Attribute> output = new ArrayList<>(join.left().output()); + // the order is stable (since the AttributeSet preserves the insertion order) + output.addAll(join.right().outputSet().subtract(new AttributeSet(rightKeys))); + + // update the config - pick the left keys as those in the output + type = new UsingJoinType(coreJoin, rightKeys); + config = new JoinConfig(type, leftKeys, leftKeys, rightKeys); + join = new LookupJoin(join.source(), join.left(), join.right(), config, output); + } + // everything else is unsupported for now + else { + UnresolvedAttribute errorAttribute = new UnresolvedAttribute(join.source(), "unsupported", "Unsupported join type"); + // add error message + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + return join; + } + + private List<Attribute> 
resolveUsingColumns(List<Attribute> cols, List<Attribute> output, String side) { + List<Attribute> resolved = new ArrayList<>(cols.size()); + for (Attribute col : cols) { + if (col instanceof UnresolvedAttribute ua) { + Attribute resolvedCol = maybeResolveAttribute(ua, output); + if (resolvedCol instanceof UnresolvedAttribute ucol) { + String message = ua.unresolvedMessage(); + String match = "column [" + ucol.name() + "]"; + resolvedCol = ucol.withUnresolvedMessage(message.replace(match, match + " in " + side + " side of join")); + } + resolved.add(resolvedCol); + } + // columns are expected to be unresolved - if that's not the case return an error + else { + return singletonList( + new UnresolvedAttribute( + col.source(), + col.name(), + "Surprised to discover column [" + col.name() + "] already resolved" + ) + ); + } + } + return resolved; + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List<Attribute> childrenOutput) { return maybeResolveAttribute(ua, childrenOutput, log); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index 9d7c5e141a2b1..460d30618df79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; @@ -22,14 +23,16 @@ public class PreAnalyzer { public static class PreAnalysis { - public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList()); + public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList(), emptyList()); public final List<TableInfo> indices; public final List<Enrich> enriches; + public final List<TableInfo> lookupIndices; - public PreAnalysis(List<TableInfo> indices, List<Enrich> enriches) { + public PreAnalysis(List<TableInfo> indices, List<Enrich> enriches, List<TableInfo> lookupIndices) { this.indices = indices; this.enriches = enriches; + this.lookupIndices = lookupIndices; } } @@ -44,13 +47,17 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { protected PreAnalysis doPreAnalyze(LogicalPlan plan) { List<TableInfo> indices = new ArrayList<>(); List<Enrich> unresolvedEnriches = new ArrayList<>(); + List<TableInfo> lookupIndices = new ArrayList<>(); - plan.forEachUp(UnresolvedRelation.class, p -> indices.add(new TableInfo(p.table()))); + plan.forEachUp(UnresolvedRelation.class, p -> { + List<TableInfo> list = p.indexMode() == IndexMode.LOOKUP ? 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java
index d399c826e0bf2..3ebb52641232e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java
@@ -7,6 +7,8 @@
 package org.elasticsearch.xpack.esql.analysis;

+import org.elasticsearch.index.IndexMode;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.common.Failure;
 import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable;
@@ -52,6 +54,7 @@
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
 import org.elasticsearch.xpack.esql.plan.logical.Row;
 import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
+import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin;
 import org.elasticsearch.xpack.esql.stats.FeatureMetric;
 import org.elasticsearch.xpack.esql.stats.Metrics;
@@ -82,9 +85,11 @@ public class Verifier {

     private final Metrics metrics;
+    private final XPackLicenseState licenseState;

-    public Verifier(Metrics metrics) {
+    public Verifier(Metrics metrics, XPackLicenseState licenseState) {
         this.metrics = metrics;
+        this.licenseState = licenseState;
     }

     /**
@@ -168,6 +173,20 @@ else if (p instanceof Lookup lookup) {
             else {
                 lookup.matchFields().forEach(unresolvedExpressions);
             }
+        } else if (p instanceof LookupJoin lj) {
+            // expect the right side to always be a lookup index
+            lj.right().forEachUp(EsRelation.class, r -> {
+                if (r.indexMode() != IndexMode.LOOKUP) {
+                    failures.add(
+                        fail(
+                            r,
+                            "LOOKUP JOIN right side [{}] must be a lookup index (index_mode=lookup, not [{}])",
+                            r.index().name(),
+                            r.indexMode().getName()
+                        )
+                    );
+                }
+            });
         }

         else {
@@ -196,11 +215,16 @@ else if (p instanceof Lookup lookup) {
             checkOperationsOnUnsignedLong(p, failures);
             checkBinaryComparison(p, failures);
             checkForSortableDataTypes(p, failures);
+            checkSort(p, failures);

             checkFullTextQueryFunctions(p, failures);
         });
         checkRemoteEnrich(plan, failures);

+        if (failures.isEmpty()) {
+            checkLicense(plan, licenseState, failures);
+        }
+
         // gather metrics
         if (failures.isEmpty()) {
             gatherMetrics(plan, partialMetrics);
@@ -209,6 +233,18 @@ else if (p instanceof Lookup lookup) {
         return failures;
     }

+    private void checkSort(LogicalPlan p, Set<Failure> failures) {
+        if (p instanceof OrderBy ob) {
+            ob.order().forEach(o -> {
+                o.forEachDown(Function.class, f -> {
+                    if (f instanceof AggregateFunction) {
+                        failures.add(fail(f, "Aggregate functions are not allowed in SORT [{}]", f.functionName()));
+                    }
+                });
+            });
+        }
+    }
+
     private static void checkFilterConditionType(LogicalPlan p, Set<Failure> localFailures) {
         if (p instanceof Filter f) {
             Expression condition = f.condition();
@@ -488,7 +524,7 @@ private static void checkRow(LogicalPlan p, Set<Failure> failures) {
         if (p instanceof Row row) {
             row.fields().forEach(a -> {
                 if (DataType.isRepresentable(a.dataType()) == false) {
-                    failures.add(fail(a, "cannot use [{}] directly in a row assignment",
a.child().sourceText())); + failures.add(fail(a.child(), "cannot use [{}] directly in a row assignment", a.child().sourceText())); } }); } @@ -546,6 +582,14 @@ private static void checkBinaryComparison(LogicalPlan p, Set failures) }); } + private void checkLicense(LogicalPlan plan, XPackLicenseState licenseState, Set failures) { + plan.forEachExpressionDown(Function.class, p -> { + if (p.checkLicense(licenseState) == false) { + failures.add(new Failure(p, "current license is non-compliant for function [" + p.sourceText() + "]")); + } + }); + } + private void gatherMetrics(LogicalPlan plan, BitSet b) { plan.forEachDown(p -> FeatureMetric.set(p, b)); for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 286ddbaa29a5b..e52e9ae989a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -41,6 +41,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.compute.operator.lookup.EnrichQuerySourceOperator; import org.elasticsearch.compute.operator.lookup.MergePositionsOperator; import org.elasticsearch.compute.operator.lookup.QueryList; @@ -78,6 +79,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -166,6 +168,10 @@ abstract class AbstractLookupService list releasables.add(mergePositionsOperator); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); QueryList queryList = queryList(request, searchExecutionContext, inputBlock, request.inputDataType); + var warnings = Warnings.createWarnings( + DriverContext.WarningsMode.COLLECT, + request.source.source().getLineNumber(), + request.source.source().getColumnNumber(), + request.source.text() + ); var queryOperator = new EnrichQuerySourceOperator( driverContext.blockFactory(), EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, queryList, - searchExecutionContext.getIndexReader() + searchExecutionContext.getIndexReader(), + warnings ); releasables.add(queryOperator); var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, request.extractFields); @@ -447,13 +460,22 @@ abstract static class Request { final DataType inputDataType; final Page inputPage; final List extractFields; + final Source source; - Request(String sessionId, String index, DataType inputDataType, Page inputPage, List extractFields) { + Request( + String sessionId, + String index, + DataType inputDataType, + Page inputPage, + List extractFields, + Source source + ) { this.sessionId = sessionId; this.index = index; this.inputDataType = inputDataType; this.inputPage = inputPage; this.extractFields = extractFields; + this.source = source; } } @@ -467,6 +489,7 @@ abstract static 
class TransportRequest extends org.elasticsearch.transport.Trans final DataType inputDataType; final Page inputPage; final List extractFields; + final Source source; // TODO: Remove this workaround once we have Block RefCount final Page toRelease; final RefCounted refs = AbstractRefCounted.of(this::releasePage); @@ -477,7 +500,8 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans DataType inputDataType, Page inputPage, Page toRelease, - List extractFields + List extractFields, + Source source ) { this.sessionId = sessionId; this.shardId = shardId; @@ -485,6 +509,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans this.inputPage = inputPage; this.toRelease = toRelease; this.extractFields = extractFields; + this.source = source; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index 6e5845fae33b7..df608a04632a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -16,9 +16,11 @@ import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -35,6 +37,8 @@ public final class EnrichLookupOperator extends AsyncOperator { private final String matchType; private final String matchField; private final List enrichFields; + private final ResponseHeadersCollector responseHeadersCollector; + private final Source source; private long totalTerms = 0L; public record Factory( @@ -47,7 +51,8 @@ public record Factory( String enrichIndex, String matchType, String matchField, - List enrichFields + List enrichFields, + Source source ) implements OperatorFactory { @Override public String describe() { @@ -75,7 +80,8 @@ public Operator get(DriverContext driverContext) { enrichIndex, matchType, matchField, - enrichFields + enrichFields, + source ); } } @@ -91,7 +97,8 @@ public EnrichLookupOperator( String enrichIndex, String matchType, String matchField, - List enrichFields + List enrichFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -103,6 +110,8 @@ public EnrichLookupOperator( this.matchType = matchType; this.matchField = matchField; this.enrichFields = enrichFields; + this.source = source; + this.responseHeadersCollector = new ResponseHeadersCollector(enrichLookupService.getThreadContext()); } @Override @@ -116,9 +125,14 @@ protected void performAsync(Page inputPage, ActionListener listener) { matchType, matchField, new Page(inputBlock), - enrichFields + enrichFields, + source + ); + enrichLookupService.lookupAsync( + request, + parentTask, + ActionListener.runBefore(listener.map(inputPage::appendPage), responseHeadersCollector::collect) ); - enrichLookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } @Override @@ -140,6 +154,7 @@ public String toString() { 
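The ActionListener.runBefore(...) wiring above guarantees the per-request response headers (including warnings) are collected before the wrapped listener observes the lookup result. A minimal sketch of that pattern, assuming nothing from Elasticsearch; the Listener interface below is a stand-in for ActionListener.

    // Stand-alone illustration of the "run a side effect, then deliver" pattern.
    interface Listener<T> { void onResponse(T value); }

    class RunBeforeSketch {
        static <T> Listener<T> runBefore(Listener<T> delegate, Runnable sideEffect) {
            return value -> {
                sideEffect.run();               // e.g. responseHeadersCollector::collect
                delegate.onResponse(value);     // e.g. listener.map(inputPage::appendPage)
            };
        }

        public static void main(String[] args) {
            Listener<String> wrapped = runBefore(
                page -> System.out.println("deliver " + page),
                () -> System.out.println("collect headers")
            );
            wrapped.onResponse("page-1"); // prints "collect headers", then "deliver page-1"
        }
    }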
protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates early (e.g., have enough result). + responseHeadersCollector.finish(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 2d85b46e33a8c..50a1ffce4841f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.RangeFieldMapper; +import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchService; @@ -27,6 +29,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -71,13 +74,15 @@ protected TransportRequest transportRequest(EnrichLookupService.Request request, request.matchField, request.inputPage, null, - request.extractFields + request.extractFields, + request.source ); } @Override protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); + validateTypes(inputDataType, fieldType); return switch (request.matchType) { case "match", "range" -> termQueryList(fieldType, context, inputBlock, inputDataType); case "geo_match" -> QueryList.geoShapeQueryList(fieldType, context, inputBlock); @@ -85,6 +90,33 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c }; } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { + if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) { + // For range policy types, the ENRICH index field type will be one of a list of supported range types, + // which need to match the input data type (eg. ip-range -> ip, date-range -> date, etc.) + if (rangeTypesCompatible(rangeType.rangeType(), inputDataType) == false) { + throw new EsqlIllegalArgumentException( + "ENRICH range and input types are incompatible: range[" + rangeType.rangeType() + "], input[" + inputDataType + "]" + ); + } + } + // For match policies, the ENRICH index field will always be KEYWORD, and input type will be converted to KEYWORD. + // For geo_match, type validation is done earlier, in the Analyzer. 
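A self-contained sketch of the compatibility rule that rangeTypesCompatible (defined just below) encodes. The string type names are stand-ins for the RangeType and DataType enums, and the date handling is an approximation; this is an illustration, not the ES implementation.

    // Illustration of the ENRICH range/input compatibility decision.
    import java.util.Set;

    class RangeCompatSketch {
        static boolean compatible(String rangeType, String inputType) {
            if (inputType.equals("keyword")) {
                return true; // string inputs are parsed at runtime
            }
            return switch (rangeType) {
                case "integer", "long" -> Set.of("integer", "long").contains(inputType); // whole numbers
                case "ip" -> inputType.equals("ip");
                case "date" -> inputType.equals("datetime");
                default -> false; // remaining cases compare numeric-ness, as above
            };
        }

        public static void main(String[] args) {
            System.out.println(compatible("ip", "ip"));    // true
            System.out.println(compatible("date", "ip"));  // false: incompatible, rejected
        }
    }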
+ } + + private static boolean rangeTypesCompatible(RangeType rangeType, DataType inputDataType) { + if (inputDataType.noText() == DataType.KEYWORD) { + // We allow runtime parsing of string types to numeric types + return true; + } + return switch (rangeType) { + case INTEGER, LONG -> inputDataType.isWholeNumber(); + case IP -> inputDataType == DataType.IP; + case DATE -> inputDataType.isDate(); + default -> rangeType.isNumeric() == inputDataType.isNumeric(); + }; + } + public static class Request extends AbstractLookupService.Request { private final String matchType; private final String matchField; @@ -96,9 +128,10 @@ public static class Request extends AbstractLookupService.Request { String matchType, String matchField, Page inputPage, - List extractFields + List extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -116,9 +149,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR String matchField, Page inputPage, Page toRelease, - List extractFields + List extractFields, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -138,6 +172,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro } PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -146,7 +184,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro matchField, inputPage, inputPage, - extractFields + extractFields, + source ); result.setParentTask(parentTaskId); return result; @@ -165,6 +204,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeWriteable(inputPage); PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java index 836b400c54f8c..f09f7d0e23e7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -36,7 +37,8 @@ public record Factory( DataType inputDataType, String lookupIndex, String matchField, - List loadFields + List loadFields, + Source source ) implements OperatorFactory { @Override public 
String describe() { @@ -63,7 +65,8 @@ public Operator get(DriverContext driverContext) { inputDataType, lookupIndex, matchField, - loadFields + loadFields, + source ); } } @@ -76,6 +79,7 @@ public Operator get(DriverContext driverContext) { private final String lookupIndex; private final String matchField; private final List loadFields; + private final Source source; private long totalTerms = 0L; public LookupFromIndexOperator( @@ -88,7 +92,8 @@ public LookupFromIndexOperator( DataType inputDataType, String lookupIndex, String matchField, - List loadFields + List loadFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -99,6 +104,7 @@ public LookupFromIndexOperator( this.lookupIndex = lookupIndex; this.matchField = matchField; this.loadFields = loadFields; + this.source = source; } @Override @@ -111,7 +117,8 @@ protected void performAsync(Page inputPage, ActionListener listener) { inputDataType, matchField, new Page(inputBlock), - loadFields + loadFields, + source ); lookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index ef204e88c234f..849e8e890e248 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.enrich; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -68,7 +70,8 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque request.inputPage, null, request.extractFields, - request.matchField + request.matchField, + request.source ); } @@ -87,9 +90,10 @@ public static class Request extends AbstractLookupService.Request { DataType inputDataType, String matchField, Page inputPage, - List extractFields + List extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchField = matchField; } } @@ -104,9 +108,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR Page inputPage, Page toRelease, List extractFields, - String matchField + String matchField, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchField = matchField; } @@ -122,6 +127,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = 
planIn.readNamedWriteableCollectionAsList(NamedExpression.class); String matchField = in.readString(); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -129,7 +138,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro inputPage, inputPage, extractFields, - matchField + matchField, + source ); result.setParentTask(parentTaskId); return result; @@ -145,6 +155,9 @@ public void writeTo(StreamOutput out) throws IOException { PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); out.writeString(matchField); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 816388193c5f6..c1269009c6a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; @@ -40,13 +41,13 @@ public class PlanExecutor { private final Verifier verifier; private final PlanningMetricsManager planningMetricsManager; - public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { + public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); - this.verifier = new Verifier(metrics); + this.verifier = new Verifier(metrics, licenseState); this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java new file mode 100644 index 0000000000000..7e2de0094c2ab --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.ExpressionCoreWritables; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables; +import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionWritables { + + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + + entries.addAll(allExpressions()); + entries.addAll(aggregates()); + entries.addAll(scalars()); + entries.addAll(spatials()); + entries.addAll(arithmetics()); + entries.addAll(binaryComparisons()); + entries.addAll(fullText()); + entries.addAll(unaryScalars()); + return entries; + } + + public static List attributes() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.attributes()); + entries.add(UnsupportedAttribute.ENTRY); + return entries; + } + + public static List namedExpressions() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.namedExpressions()); + entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); + return entries; + } + + public static List expressions() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.expressions()); + 
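ExpressionWritables, together with the AggregateWritables, ScalarFunctionWritables, FullTextWritables, and GroupingWritables classes introduced in this change, follows one composition pattern: each package owns its list of registry entries and the top level concatenates them. A stand-alone miniature of that pattern; the Entry record is a stand-in for NamedWriteableRegistry.Entry.

    // Illustration of the per-package registry composition used throughout this patch.
    import java.util.ArrayList;
    import java.util.List;

    class WritablesSketch {
        record Entry(String name) {} // stand-in for NamedWriteableRegistry.Entry

        static List<Entry> aggregates() { return List.of(new Entry("StdDev"), new Entry("Avg")); }
        static List<Entry> scalars() { return List.of(new Entry("Round"), new Entry("Concat")); }

        static List<Entry> getNamedWriteables() {
            List<Entry> entries = new ArrayList<>();
            entries.addAll(aggregates()); // AggregateWritables.getNamedWriteables()
            entries.addAll(scalars());    // ScalarFunctionWritables.getNamedWriteables()
            return entries;
        }

        public static void main(String[] args) {
            System.out.println(getNamedWriteables());
        }
    }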
entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); + entries.add(Order.ENTRY); + return entries; + } + + public static List allExpressions() { + List entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List aggregates() { + return AggregateWritables.getNamedWriteables(); + } + + public static List scalars() { + return ScalarFunctionWritables.getNamedWriteables(); + } + + public static List unaryScalars() { + List entries = new ArrayList<>(); + entries.add(Abs.ENTRY); + entries.add(Acos.ENTRY); + entries.add(Asin.ENTRY); + entries.add(Atan.ENTRY); + entries.add(ByteLength.ENTRY); + entries.add(Cbrt.ENTRY); + entries.add(Ceil.ENTRY); + entries.add(Cos.ENTRY); + entries.add(Cosh.ENTRY); + entries.add(Exp.ENTRY); + entries.add(Floor.ENTRY); + entries.add(FromBase64.ENTRY); + entries.add(IsNotNull.ENTRY); + entries.add(IsNull.ENTRY); + entries.add(Length.ENTRY); + entries.add(Log10.ENTRY); + entries.add(LTrim.ENTRY); + entries.add(Neg.ENTRY); + entries.add(Not.ENTRY); + entries.add(RLike.ENTRY); + entries.add(RTrim.ENTRY); + entries.add(Signum.ENTRY); + entries.add(Sin.ENTRY); + entries.add(Sinh.ENTRY); + entries.add(Space.ENTRY); + entries.add(Sqrt.ENTRY); + entries.add(StX.ENTRY); + entries.add(StY.ENTRY); + entries.add(Tan.ENTRY); + entries.add(Tanh.ENTRY); + entries.add(ToBase64.ENTRY); + entries.add(ToBoolean.ENTRY); + entries.add(ToCartesianPoint.ENTRY); + entries.add(ToDatetime.ENTRY); + entries.add(ToDateNanos.ENTRY); + entries.add(ToDegrees.ENTRY); + entries.add(ToDouble.ENTRY); + entries.add(ToGeoShape.ENTRY); + entries.add(ToCartesianShape.ENTRY); + entries.add(ToGeoPoint.ENTRY); + entries.add(ToIP.ENTRY); + entries.add(ToInteger.ENTRY); + entries.add(ToLong.ENTRY); + entries.add(ToRadians.ENTRY); + entries.add(ToString.ENTRY); + entries.add(ToUnsignedLong.ENTRY); + entries.add(ToVersion.ENTRY); + entries.add(Trim.ENTRY); + entries.add(WildcardLike.ENTRY); + entries.add(Delay.ENTRY); + // mv functions + entries.addAll(MvFunctionWritables.getNamedWriteables()); + return entries; + } + + private static List spatials() { + return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY); + } + + private static List arithmetics() { + return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY); + } + + private static List binaryComparisons() { + return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY); + } + + private static List fullText() { + return FullTextWritables.getNamedWriteables(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index ca02441d2e1ad..ea1669ccc7a4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; @@ -276,6 +277,7 @@ private static FunctionDefinition[][] functions() { def(MedianAbsoluteDeviation.class, uni(MedianAbsoluteDeviation::new), "median_absolute_deviation"), def(Min.class, uni(Min::new), "min"), def(Percentile.class, bi(Percentile::new), "percentile"), + def(StdDev.class, uni(StdDev::new), "std_dev"), def(Sum.class, uni(Sum::new), "sum"), def(Top.class, tri(Top::new), "top"), def(Values.class, uni(Values::new), "values"), @@ -501,7 +503,9 @@ private static DataType getTargetType(String[] names) { types.add(type); } } + return types.stream() + .filter(DATA_TYPE_CASTING_PRIORITY::containsKey) .min((dt1, dt2) -> DATA_TYPE_CASTING_PRIORITY.get(dt1).compareTo(DATA_TYPE_CASTING_PRIORITY.get(dt2))) .orElse(UNSUPPORTED); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index f7a74cc2ae93f..87efccfc90ab3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -30,27 +29,6 @@ * A type of {@code Function} that takes multiple values and extracts a single value out of them. For example, {@code AVG()}. */ public abstract class AggregateFunction extends Function { - public static List getNamedWriteables() { - return List.of( - Avg.ENTRY, - Count.ENTRY, - CountDistinct.ENTRY, - Max.ENTRY, - Median.ENTRY, - MedianAbsoluteDeviation.ENTRY, - Min.ENTRY, - Percentile.ENTRY, - Rate.ENTRY, - SpatialCentroid.ENTRY, - Sum.ENTRY, - Top.ENTRY, - Values.ENTRY, - // internal functions - ToPartial.ENTRY, - FromPartial.ENTRY, - WeightedAvg.ENTRY - ); - } private final Expression field; private final List parameters; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java new file mode 100644 index 0000000000000..d74b5c8b386b8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class AggregateWritables { + + public static List getNamedWriteables() { + return List.of( + Avg.ENTRY, + Count.ENTRY, + CountDistinct.ENTRY, + Max.ENTRY, + Median.ENTRY, + MedianAbsoluteDeviation.ENTRY, + Min.ENTRY, + Percentile.ENTRY, + Rate.ENTRY, + SpatialCentroid.ENTRY, + StdDev.ENTRY, + Sum.ENTRY, + Top.ENTRY, + Values.ENTRY, + // internal functions + ToPartial.ENTRY, + FromPartial.ENTRY, + WeightedAvg.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index fa8a9e7d8c837..3a0d616d407a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -58,7 +58,9 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE ), @Example( description = "To count the same stream of data based on two different expressions " - + "use the pattern `COUNT( OR NULL)`", + + "use the pattern `COUNT( OR NULL)`. This builds on the three-valued logic " + + "({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, " + + "plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0.", file = "stats", tag = "count-or-null" ) } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index ac2d4ff3cbc43..2165c3c7ad1a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -55,7 +55,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The maximum value of a field.", isAggregation = true, examples = { @@ -72,7 +72,7 @@ public Max( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } + type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } ) Expression field ) { this(source, field, Literal.TRUE); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index a5fc8196847b7..7d67868dd4134 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -55,7 +55,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", 
"integer", "long", "date", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The minimum value of a field.", isAggregation = true, examples = { @@ -72,7 +72,7 @@ public Min( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } + type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } ) Expression field ) { this(source, field, Literal.TRUE); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java new file mode 100644 index 0000000000000..189b6a81912cb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevIntAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +public class StdDev extends AggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "StdDev", StdDev::new); + + @FunctionInfo( + returnType = "double", + description = "The standard deviation of a numeric field.", + isAggregation = true, + examples = { + @Example(file = "stats", tag = "stdev"), + @Example( + description = "The expression can use inline functions. 
For example, to calculate the standard " + + "deviation of each employee's maximum salary changes, first use `MV_MAX` on each row, " + + "and then use `STD_DEV` on the result", + file = "stats", + tag = "docsStatsStdDevNestedExpression" + ) } + ) + public StdDev(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { + this(source, field, Literal.TRUE); + } + + public StdDev(Source source, Expression field, Expression filter) { + super(source, field, filter, emptyList()); + } + + private StdDev(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + + @Override + protected Expression.TypeResolution resolveType() { + return isType( + field(), + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "numeric except unsigned_long or counter types" + ); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StdDev::new, field(), filter()); + } + + @Override + public StdDev replaceChildren(List newChildren) { + return new StdDev(source(), newChildren.get(0), newChildren.get(1)); + } + + public StdDev withFilter(Expression filter) { + return new StdDev(source(), field(), filter); + } + + @Override + public final AggregatorFunctionSupplier supplier(List inputChannels) { + DataType type = field().dataType(); + if (type == DataType.LONG) { + return new StdDevLongAggregatorFunctionSupplier(inputChannels); + } + if (type == DataType.INTEGER) { + return new StdDevIntAggregatorFunctionSupplier(inputChannels); + } + if (type == DataType.DOUBLE) { + return new StdDevDoubleAggregatorFunctionSupplier(inputChannels); + } + throw EsqlIllegalArgumentException.illegalDataType(type); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 111eab051719b..e7df990b20422 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -52,7 +52,7 @@ public class Values extends AggregateFunction implements ToAggregator { ); @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, preview = true, description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. 
" + "If you need the values returned in order use <>.", @@ -70,7 +70,10 @@ public class Values extends AggregateFunction implements ToAggregator { ) public Values( Source source, - @Param(name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression v + @Param( + name = "field", + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" } + ) Expression v ) { this(source, v, Literal.TRUE); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java index 4106df331d101..9f08401a42dd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java @@ -94,7 +94,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and add that constant to the list in - * {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction#getNamedWriteables}. + * {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables#getNamedWriteables}. * *
  • * Do the same with {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 1a3667de992cd..9addf08e1b5f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -29,9 +28,6 @@ * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries. */ public abstract class FullTextFunction extends Function { - public static List getNamedWriteables() { - return List.of(QueryString.ENTRY, Match.ENTRY); - } private final Expression query; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java new file mode 100644 index 0000000000000..d59c736783172 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; + +import java.util.List; + +public class FullTextWritables { + + public static List getNamedWriteables() { + return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, QueryString.ENTRY, Match.ENTRY); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java new file mode 100644 index 0000000000000..89b9036e97e3a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.grouping; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class GroupingWritables { + + public static List getNamedWriteables() { + return List.of(Bucket.ENTRY, Categorize.ENTRY); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 65985f234ac92..404ce7e3900c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -7,57 +7,11 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; -import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; -import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; -import java.util.ArrayList; import java.util.List; /** @@ -71,56 +25,6 @@ *

    */ public abstract class EsqlScalarFunction extends ScalarFunction implements EvaluatorMapper { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.add(And.ENTRY); - entries.add(Atan2.ENTRY); - entries.add(BitLength.ENTRY); - entries.add(Bucket.ENTRY); - entries.add(Case.ENTRY); - entries.add(Categorize.ENTRY); - entries.add(CIDRMatch.ENTRY); - entries.add(Coalesce.ENTRY); - entries.add(Concat.ENTRY); - entries.add(E.ENTRY); - entries.add(EndsWith.ENTRY); - entries.add(Greatest.ENTRY); - entries.add(Hypot.ENTRY); - entries.add(In.ENTRY); - entries.add(InsensitiveEquals.ENTRY); - entries.add(DateExtract.ENTRY); - entries.add(DateDiff.ENTRY); - entries.add(DateFormat.ENTRY); - entries.add(DateParse.ENTRY); - entries.add(DateTrunc.ENTRY); - entries.add(IpPrefix.ENTRY); - entries.add(Least.ENTRY); - entries.add(Left.ENTRY); - entries.add(Locate.ENTRY); - entries.add(Log.ENTRY); - entries.add(Now.ENTRY); - entries.add(Or.ENTRY); - entries.add(Pi.ENTRY); - entries.add(Pow.ENTRY); - entries.add(Right.ENTRY); - entries.add(Repeat.ENTRY); - entries.add(Replace.ENTRY); - entries.add(Reverse.ENTRY); - entries.add(Round.ENTRY); - entries.add(Split.ENTRY); - entries.add(Substring.ENTRY); - entries.add(StartsWith.ENTRY); - entries.add(Tau.ENTRY); - entries.add(ToLower.ENTRY); - entries.add(ToUpper.ENTRY); - entries.addAll(BinarySpatialFunction.getNamedWriteables()); - entries.addAll(EsqlArithmeticOperation.getNamedWriteables()); - entries.addAll(EsqlBinaryComparison.getNamedWriteables()); - entries.addAll(FullTextPredicate.getNamedWriteables()); - entries.addAll(UnaryScalarFunction.getNamedWriteables()); - return entries; - } - protected EsqlScalarFunction(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java new file mode 100644 index 0000000000000..192ca6c43e57d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ScalarFunctionWritables { + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + entries.add(And.ENTRY); + entries.add(Atan2.ENTRY); + entries.add(BitLength.ENTRY); + entries.add(Case.ENTRY); + entries.add(CIDRMatch.ENTRY); 
+ entries.add(Coalesce.ENTRY); + entries.add(Concat.ENTRY); + entries.add(E.ENTRY); + entries.add(EndsWith.ENTRY); + entries.add(Greatest.ENTRY); + entries.add(Hypot.ENTRY); + entries.add(In.ENTRY); + entries.add(InsensitiveEquals.ENTRY); + entries.add(DateExtract.ENTRY); + entries.add(DateDiff.ENTRY); + entries.add(DateFormat.ENTRY); + entries.add(DateParse.ENTRY); + entries.add(DateTrunc.ENTRY); + entries.add(IpPrefix.ENTRY); + entries.add(Least.ENTRY); + entries.add(Left.ENTRY); + entries.add(Locate.ENTRY); + entries.add(Log.ENTRY); + entries.add(Now.ENTRY); + entries.add(Or.ENTRY); + entries.add(Pi.ENTRY); + entries.add(Pow.ENTRY); + entries.add(Right.ENTRY); + entries.add(Repeat.ENTRY); + entries.add(Replace.ENTRY); + entries.add(Reverse.ENTRY); + entries.add(Round.ENTRY); + entries.add(Split.ENTRY); + entries.add(Substring.ENTRY); + entries.add(StartsWith.ENTRY); + entries.add(Tau.ENTRY); + entries.add(ToLower.ENTRY); + entries.add(ToUpper.ENTRY); + + entries.addAll(GroupingWritables.getNamedWriteables()); + return entries; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 610fe1c5ea000..d2af110a5203f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -7,130 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; public abstract class UnaryScalarFunction extends EsqlScalarFunction { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.add(Abs.ENTRY); - entries.add(Acos.ENTRY); - entries.add(Asin.ENTRY); - entries.add(Atan.ENTRY); - entries.add(ByteLength.ENTRY); - entries.add(Cbrt.ENTRY); - entries.add(Ceil.ENTRY); - entries.add(Cos.ENTRY); - entries.add(Cosh.ENTRY); - entries.add(Exp.ENTRY); - entries.add(Floor.ENTRY); - entries.add(FromBase64.ENTRY); - entries.add(IsNotNull.ENTRY); - entries.add(IsNull.ENTRY); - entries.add(Length.ENTRY); - entries.add(Log10.ENTRY); - entries.add(LTrim.ENTRY); - 
entries.add(Neg.ENTRY); - entries.add(Not.ENTRY); - entries.add(RLike.ENTRY); - entries.add(RTrim.ENTRY); - entries.add(Signum.ENTRY); - entries.add(Sin.ENTRY); - entries.add(Sinh.ENTRY); - entries.add(Space.ENTRY); - entries.add(Sqrt.ENTRY); - entries.add(StX.ENTRY); - entries.add(StY.ENTRY); - entries.add(Tan.ENTRY); - entries.add(Tanh.ENTRY); - entries.add(ToBase64.ENTRY); - entries.add(ToBoolean.ENTRY); - entries.add(ToCartesianPoint.ENTRY); - entries.add(ToDatetime.ENTRY); - entries.add(ToDateNanos.ENTRY); - entries.add(ToDegrees.ENTRY); - entries.add(ToDouble.ENTRY); - entries.add(ToGeoShape.ENTRY); - entries.add(ToCartesianShape.ENTRY); - entries.add(ToGeoPoint.ENTRY); - entries.add(ToIP.ENTRY); - entries.add(ToInteger.ENTRY); - entries.add(ToLong.ENTRY); - entries.add(ToRadians.ENTRY); - entries.add(ToString.ENTRY); - entries.add(ToUnsignedLong.ENTRY); - entries.add(ToVersion.ENTRY); - entries.add(Trim.ENTRY); - entries.add(WildcardLike.ENTRY); - entries.add(Delay.ENTRY); - entries.addAll(AbstractMultivalueFunction.getNamedWriteables()); - return entries; - } - protected final Expression field; public UnaryScalarFunction(Source source, Expression field) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 6e38d72500840..a35b67d7ac3fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -62,7 +62,7 @@ public interface DateTruncFactoryProvider { protected static final ZoneId DEFAULT_TZ = ZoneOffset.UTC; @FunctionInfo( - returnType = "date", + returnType = { "date", "date_nanos" }, description = "Rounds down a date to the closest interval.", examples = { @Example(file = "date", tag = "docsDateTrunc"), @@ -83,7 +83,7 @@ public DateTrunc( type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, - @Param(name = "date", type = { "date" }, description = "Date expression") Expression field + @Param(name = "date", type = { "date", "date_nanos" }, description = "Date expression") Expression field ) { super(source, List.of(interval, field)); this.interval = interval; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 6a3b58728b192..a32761cfd9948 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.List; /** * Base class for functions that reduce multivalued fields into single valued fields. @@ -32,27 +30,6 @@ *

    */ public abstract class AbstractMultivalueFunction extends UnaryScalarFunction { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of( - MvAppend.ENTRY, - MvAvg.ENTRY, - MvConcat.ENTRY, - MvCount.ENTRY, - MvDedupe.ENTRY, - MvFirst.ENTRY, - MvLast.ENTRY, - MvMax.ENTRY, - MvMedian.ENTRY, - MvMedianAbsoluteDeviation.ENTRY, - MvMin.ENTRY, - MvPercentile.ENTRY, - MvPSeriesWeightedSum.ENTRY, - MvSlice.ENTRY, - MvSort.ENTRY, - MvSum.ENTRY, - MvZip.ENTRY - ); - } protected AbstractMultivalueFunction(Source source, Expression field) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java new file mode 100644 index 0000000000000..7f8fcd910ad6d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class MvFunctionWritables { + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + MvAppend.ENTRY, + MvAvg.ENTRY, + MvConcat.ENTRY, + MvCount.ENTRY, + MvDedupe.ENTRY, + MvFirst.ENTRY, + MvLast.ENTRY, + MvMax.ENTRY, + MvMedian.ENTRY, + MvMedianAbsoluteDeviation.ENTRY, + MvMin.ENTRY, + MvPercentile.ENTRY, + MvPSeriesWeightedSum.ENTRY, + MvSlice.ENTRY, + MvSort.ENTRY, + MvSum.ENTRY, + MvZip.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 46538b77edc74..eccc7ee4672c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -112,7 +112,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and register it. To register it, look for a method like - * {@link org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction#getNamedWriteables()} + * {@link org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables#getNamedWriteables()} * in your function's class hierarchy. Keep going up until you hit a function with that name. * Then add your new "ENTRY" constant to the list it returns. *
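For orientation: the hunks above replace the per-class getNamedWriteables() methods with dedicated holder classes (GroupingWritables, ScalarFunctionWritables, MvFunctionWritables). Below is a minimal sketch of how a consumer might combine the new holders into one registry; the class name ExpressionWritablesSketch and the exact combination are illustrative, not part of this change, and note that ScalarFunctionWritables already pulls in the grouping entries via GroupingWritables, so they are not added twice:

    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables;
    import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFunctionWritables;

    import java.util.ArrayList;
    import java.util.List;

    public class ExpressionWritablesSketch {
        /** Collects the entries exposed by the new holder classes into a single registry. */
        public static NamedWriteableRegistry buildRegistry() {
            List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(ScalarFunctionWritables.getNamedWriteables());
            entries.addAll(MvFunctionWritables.getNamedWriteables());
            // NamedWriteableRegistry rejects duplicate (category, name) pairs, which is why
            // each ENTRY must live in exactly one holder.
            return new NamedWriteableRegistry(entries);
        }
    }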
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java index 8839244e6c601..4d08b0e9687ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import java.io.IOException; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -40,9 +38,6 @@ * and of compatible CRS. For example geo_point and geo_shape can be compared, but not geo_point and cartesian_point. */ public abstract class BinarySpatialFunction extends BinaryScalarFunction implements SpatialEvaluatorFactory.SpatialSourceResolution { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY); - } private final SpatialTypeResolver spatialTypeResolver; private SpatialCrsType crsType; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 62201bcfa858d..74394d796855f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -21,7 +20,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.List; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; @@ -31,9 +29,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; public abstract class EsqlArithmeticOperation extends ArithmeticOperation implements EvaluatorMapper { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY); - } /** * The only role of this enum is to fit the super constructor that expects a BinaryOperation which is diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index db771a6354883..cbbf87fb6c4cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -25,7 +24,6 @@ import java.io.IOException; import java.time.ZoneId; -import java.util.List; import java.util.Map; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -33,9 +31,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY); - } private final Map<DataType, EsqlArithmeticOperation.BinaryEvaluator> evaluatorMap; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java index 56ade3982e0d8..3ae7bd93092ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java @@ -53,12 +53,12 @@ public LessThan( Source source, @Param( name = "lhs", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, description = "An expression." ) Expression left, @Param( name = "rhs", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, + type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" }, description = "An expression." 
) Expression right ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 1e1cc3b86a9d5..47e5b9acfbf9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.NameId; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.session.Configuration; @@ -160,7 +161,7 @@ public Block[] readCachedBlockArray() throws IOException { @Override public String sourceText() { - return configuration.query(); + return configuration == null ? Source.EMPTY.text() : configuration.query(); } static void throwOnNullOptionalRead(Class<?> type) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 5007b011092f0..a5f97cf961378 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceLimitAndSortAsTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceOrderByExpressionWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRowAsLocalRelation; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceStatsFilteredAggWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceTrivialTypeConversions; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SetAsOptimized; @@ -192,6 +193,6 @@ protected static Batch<LogicalPlan> operators() { } protected static Batch<LogicalPlan> cleanup() { - return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN()); + return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN(), new ReplaceRowAsLocalRelation()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index 153efa5b5c233..fb9d3f7e2f91e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule<Limit> { @@ -63,7 +63,7 @@ public LogicalPlan rule(Limit limit) { } } } else 
if (limit.child() instanceof Join join) { - if (join.config().type() == JoinType.LEFT && join.right() instanceof LocalRelation) { + if (join.config().type() == JoinTypes.LEFT && join.right() instanceof LocalRelation) { // This is a hash join from something like a lookup. return join.replaceChildren(limit.replaceChild(join.left()), join.right()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java new file mode 100644 index 0000000000000..eebeb1dc14f48 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; + +import java.util.ArrayList; +import java.util.List; + +public final class ReplaceRowAsLocalRelation extends OptimizerRules.OptimizerRule<Row> { + + @Override + protected LogicalPlan rule(Row row) { + var fields = row.fields(); + List<Object> values = new ArrayList<>(fields.size()); + fields.forEach(f -> values.add(f.child().fold())); + var blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); + return new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index 1c20f765c6d51..ea9cd76bcb9bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -17,10 +17,12 @@ import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.LeafExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; +import java.util.Collections; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -93,8 +95,16 @@ public PhysicalPlan apply(PhysicalPlan plan) { private static Set<Attribute> missingAttributes(PhysicalPlan p) { var missing = new LinkedHashSet<Attribute>(); - var input = p.inputSet(); + var inputSet = p.inputSet(); + // FIXME: the extractors should work on the right side as well + // skip the lookup join since the right side is always materialized and a projection + if (p instanceof LookupJoinExec join) { + // collect fields used in the join condition + return 
Collections.emptySet(); + } + + var input = inputSet; // collect field attributes used inside expressions p.forEachExpression(TypedAttribute.class, f -> { if (f instanceof FieldAttribute || f instanceof MetadataAttribute) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index f01e7c4b1f3a6..9f574ee8005b2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; @@ -253,8 +252,6 @@ static boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof StringQueryPredicate) { - return true; } else if (exp instanceof QueryString) { return true; } else if (exp instanceof Match mf) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 8f9c5956dddd5..c83fdbe8847a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -23,7 +23,11 @@ null null null null -':' +null +null +null +null +null '|' null null @@ -33,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' 
@@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,11 +150,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -155,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -262,11 +280,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE DIGIT LETTER @@ -286,6 +308,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -316,7 +339,6 @@ MINUS ASTERISK SLASH PERCENT -EXPRESSION_COLON NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -427,6 +449,16 @@ LOOKUP_FIELD_ID_PATTERN LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN_PIPE +JOIN_JOIN +JOIN_AS +JOIN_ON +USING +JOIN_UNQUOTED_IDENTIFER +JOIN_QUOTED_IDENTIFIER +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_PIPE METRICS_UNQUOTED_SOURCE METRICS_QUOTED_SOURCE @@ -461,8 +493,9 @@ SHOW_MODE SETTING_MODE LOOKUP_MODE LOOKUP_FIELD_MODE +JOIN_MODE METRICS_MODE CLOSING_METRICS_MODE atn: [serialized ATN regenerated by ANTLR; the machine-generated integer table, truncated in the source excerpt, is elided]
0, 752, 753, 7, 2, 0, 0, 753, 754, 7, 4, 0, 0, 754, 96, 1, 0, 0, 0, 755, 756, 5, 61, 0, 0, 756, 98, 1, 0, 0, 0, 757, 758, 5, 58, 0, 0, 758, 759, 5, 58, 0, 0, 759, 100, 1, 0, 0, 0, 760, 761, 5, 44, 0, 0, 761, 102, 1, 0, 0, 0, 762, 763, 7, 0, 0, 0, 763, 764, 7, 3, 0, 0, 764, 765, 7, 2, 0, 0, 765, 766, 7, 4, 0, 0, 766, 104, 1, 0, 0, 0, 767, 768, 5, 46, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 12, 0, 0, 771, 772, 7, 13, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 3, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 15, 0, 0, 776, 777, 7, 1, 0, 0, 777, 778, 7, 6, 0, 0, 778, 779, 7, 2, 0, 0, 779, 780, 7, 5, 0, 0, 780, 110, 1, 0, 0, 0, 781, 782, 7, 1, 0, 0, 782, 783, 7, 9, 0, 0, 783, 112, 1, 0, 0, 0, 784, 785, 7, 1, 0, 0, 785, 786, 7, 2, 0, 0, 786, 114, 1, 0, 0, 0, 787, 788, 7, 13, 0, 0, 788, 789, 7, 12, 0, 0, 789, 790, 7, 2, 0, 0, 790, 791, 7, 5, 0, 0, 791, 116, 1, 0, 0, 0, 792, 793, 7, 13, 0, 0, 793, 794, 7, 1, 0, 0, 794, 795, 7, 18, 0, 0, 795, 796, 7, 3, 0, 0, 796, 118, 1, 0, 0, 0, 797, 798, 5, 40, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 7, 0, 0, 801, 802, 7, 5, 0, 0, 802, 122, 1, 0, 0, 0, 803, 804, 7, 9, 0, 0, 804, 805, 7, 20, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 13, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 20, 0, 0, 810, 811, 7, 13, 0, 0, 811, 812, 7, 13, 0, 0, 812, 813, 7, 2, 0, 0, 813, 126, 1, 0, 0, 0, 814, 815, 7, 7, 0, 0, 815, 816, 7, 6, 0, 0, 816, 128, 1, 0, 0, 0, 817, 818, 5, 63, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 7, 6, 0, 0, 820, 821, 7, 13, 0, 0, 821, 822, 7, 1, 0, 0, 822, 823, 7, 18, 0, 0, 823, 824, 7, 3, 0, 0, 824, 132, 1, 0, 0, 0, 825, 826, 5, 41, 0, 0, 826, 134, 1, 0, 0, 0, 827, 828, 7, 5, 0, 0, 828, 829, 7, 6, 0, 0, 829, 830, 7, 20, 0, 0, 830, 831, 7, 3, 0, 0, 831, 136, 1, 0, 0, 0, 832, 833, 5, 61, 0, 0, 833, 834, 5, 61, 0, 0, 834, 138, 1, 0, 0, 0, 835, 836, 5, 61, 0, 0, 836, 837, 5, 126, 0, 0, 837, 140, 1, 0, 0, 0, 838, 839, 5, 33, 0, 0, 839, 840, 5, 61, 0, 0, 840, 142, 1, 0, 0, 0, 841, 842, 5, 60, 0, 0, 842, 144, 1, 0, 0, 0, 843, 844, 5, 60, 0, 0, 844, 845, 5, 61, 0, 0, 845, 146, 1, 0, 0, 0, 846, 847, 5, 62, 0, 0, 847, 148, 1, 0, 0, 0, 848, 849, 5, 62, 0, 0, 849, 850, 5, 61, 0, 0, 850, 150, 1, 0, 0, 0, 851, 852, 5, 43, 0, 0, 852, 152, 1, 0, 0, 0, 853, 854, 5, 45, 0, 0, 854, 154, 1, 0, 0, 0, 855, 856, 5, 42, 0, 0, 856, 156, 1, 0, 0, 0, 857, 858, 5, 47, 0, 0, 858, 158, 1, 0, 0, 0, 859, 860, 5, 37, 0, 0, 860, 160, 1, 0, 0, 0, 861, 862, 4, 73, 3, 0, 862, 863, 3, 61, 23, 0, 863, 864, 1, 0, 0, 0, 864, 865, 6, 73, 12, 0, 865, 162, 1, 0, 0, 0, 866, 867, 3, 45, 15, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 74, 13, 0, 869, 164, 1, 0, 0, 0, 870, 873, 3, 129, 57, 0, 871, 874, 3, 67, 26, 0, 872, 874, 3, 81, 33, 0, 873, 871, 1, 0, 0, 0, 873, 872, 1, 0, 0, 0, 874, 878, 1, 0, 0, 0, 875, 877, 3, 83, 34, 0, 876, 875, 1, 0, 0, 0, 877, 880, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 888, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 883, 3, 129, 57, 0, 882, 884, 3, 65, 25, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 870, 1, 0, 0, 0, 887, 881, 1, 0, 0, 0, 888, 166, 1, 0, 0, 0, 889, 890, 5, 91, 0, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 76, 0, 0, 892, 893, 6, 76, 0, 0, 893, 168, 1, 0, 0, 0, 894, 895, 5, 93, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 77, 11, 0, 897, 898, 6, 77, 11, 0, 898, 170, 1, 0, 0, 0, 899, 903, 3, 67, 26, 0, 900, 902, 3, 83, 34, 0, 901, 900, 1, 0, 0, 0, 902, 905, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 916, 1, 0, 0, 0, 
905, 903, 1, 0, 0, 0, 906, 909, 3, 81, 33, 0, 907, 909, 3, 75, 30, 0, 908, 906, 1, 0, 0, 0, 908, 907, 1, 0, 0, 0, 909, 911, 1, 0, 0, 0, 910, 912, 3, 83, 34, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 899, 1, 0, 0, 0, 915, 908, 1, 0, 0, 0, 916, 172, 1, 0, 0, 0, 917, 919, 3, 77, 31, 0, 918, 920, 3, 79, 32, 0, 919, 918, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 919, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 3, 77, 31, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 173, 79, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 55, 20, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 57, 21, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 10, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 59, 22, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 10, 0, 938, 182, 1, 0, 0, 0, 939, 940, 3, 167, 76, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 84, 14, 0, 942, 943, 6, 84, 15, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 63, 24, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 16, 0, 947, 948, 6, 85, 11, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 59, 22, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 55, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 87, 10, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 57, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 10, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 63, 24, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 16, 0, 964, 965, 6, 89, 11, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 167, 76, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 14, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 169, 77, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 61, 23, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 12, 0, 977, 200, 1, 0, 0, 0, 978, 979, 3, 101, 43, 0, 979, 980, 1, 0, 0, 0, 980, 981, 6, 93, 18, 0, 981, 202, 1, 0, 0, 0, 982, 983, 3, 97, 41, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 94, 19, 0, 985, 204, 1, 0, 0, 0, 986, 987, 7, 16, 0, 0, 987, 988, 7, 3, 0, 0, 988, 989, 7, 5, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 0, 0, 0, 991, 992, 7, 12, 0, 0, 992, 993, 7, 5, 0, 0, 993, 994, 7, 12, 0, 0, 994, 206, 1, 0, 0, 0, 995, 999, 8, 32, 0, 0, 996, 997, 5, 47, 0, 0, 997, 999, 8, 33, 0, 0, 998, 995, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 999, 208, 1, 0, 0, 0, 1000, 1002, 3, 207, 96, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 209, 97, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 20, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 85, 35, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 21, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 55, 20, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 57, 21, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 10, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 59, 22, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 10, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 63, 24, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 16, 0, 1028, 1029, 6, 103, 11, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 105, 45, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 101, 43, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 18, 0, 1037, 226, 1, 0, 0, 0, 1038, 1039, 4, 106, 4, 0, 1039, 1040, 3, 129, 57, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 23, 0, 1042, 228, 1, 0, 0, 0, 1043, 1044, 4, 107, 5, 0, 1044, 1045, 3, 165, 75, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 107, 24, 0, 1047, 230, 1, 0, 0, 0, 1048, 1053, 3, 67, 26, 0, 1049, 1053, 
3, 65, 25, 0, 1050, 1053, 3, 81, 33, 0, 1051, 1053, 3, 155, 70, 0, 1052, 1048, 1, 0, 0, 0, 1052, 1049, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 232, 1, 0, 0, 0, 1054, 1057, 3, 67, 26, 0, 1055, 1057, 3, 155, 70, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1061, 1, 0, 0, 0, 1058, 1060, 3, 231, 108, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1063, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1074, 1, 0, 0, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 3, 81, 33, 0, 1065, 1067, 3, 75, 30, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1070, 3, 231, 108, 0, 1069, 1068, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1074, 1, 0, 0, 0, 1073, 1056, 1, 0, 0, 0, 1073, 1066, 1, 0, 0, 0, 1074, 234, 1, 0, 0, 0, 1075, 1078, 3, 233, 109, 0, 1076, 1078, 3, 173, 79, 0, 1077, 1075, 1, 0, 0, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 55, 20, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 1, 0, 0, 0, 1085, 1086, 3, 57, 21, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 59, 22, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 10, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 63, 24, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 16, 0, 1096, 1097, 6, 114, 11, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 97, 41, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 19, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 101, 43, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 18, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 105, 45, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 117, 22, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 4, 118, 6, 0, 1111, 1112, 3, 129, 57, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 118, 23, 0, 1114, 252, 1, 0, 0, 0, 1115, 1116, 4, 119, 7, 0, 1116, 1117, 3, 165, 75, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 119, 24, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 7, 12, 0, 0, 1121, 1122, 7, 2, 0, 0, 1122, 256, 1, 0, 0, 0, 1123, 1124, 3, 235, 110, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 121, 25, 0, 1126, 258, 1, 0, 0, 0, 1127, 1128, 3, 55, 20, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 122, 10, 0, 1130, 260, 1, 0, 0, 0, 1131, 1132, 3, 57, 21, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 123, 10, 0, 1134, 262, 1, 0, 0, 0, 1135, 1136, 3, 59, 22, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 124, 10, 0, 1138, 264, 1, 0, 0, 0, 1139, 1140, 3, 63, 24, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 125, 16, 0, 1142, 1143, 6, 125, 11, 0, 1143, 266, 1, 0, 0, 0, 1144, 1145, 3, 167, 76, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 14, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 7, 7, 0, 0, 1150, 1151, 7, 9, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 7, 19, 0, 0, 1155, 1156, 7, 1, 0, 0, 1156, 1157, 7, 5, 0, 0, 1157, 1158, 7, 10, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 128, 27, 0, 1160, 272, 1, 0, 0, 0, 1161, 1162, 8, 34, 0, 0, 1162, 274, 1, 0, 0, 0, 1163, 1165, 3, 273, 129, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 3, 61, 23, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1164, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1173, 1, 0, 0, 0, 1172, 1174, 3, 273, 129, 0, 1173, 1172, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1175, 1176, 1, 0, 0, 0, 1176, 276, 1, 0, 0, 0, 1177, 1178, 3, 275, 130, 0, 1178, 1179, 1, 0, 0, 0, 1179, 
1180, 6, 131, 28, 0, 1180, 278, 1, 0, 0, 0, 1181, 1182, 3, 55, 20, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 132, 10, 0, 1184, 280, 1, 0, 0, 0, 1185, 1186, 3, 57, 21, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 133, 10, 0, 1188, 282, 1, 0, 0, 0, 1189, 1190, 3, 59, 22, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 134, 10, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 63, 24, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 16, 0, 1196, 1197, 6, 135, 11, 0, 1197, 1198, 6, 135, 11, 0, 1198, 286, 1, 0, 0, 0, 1199, 1200, 3, 97, 41, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 136, 19, 0, 1202, 288, 1, 0, 0, 0, 1203, 1204, 3, 101, 43, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 137, 18, 0, 1206, 290, 1, 0, 0, 0, 1207, 1208, 3, 105, 45, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 138, 22, 0, 1210, 292, 1, 0, 0, 0, 1211, 1212, 3, 271, 128, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 139, 29, 0, 1214, 294, 1, 0, 0, 0, 1215, 1216, 3, 235, 110, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 140, 25, 0, 1218, 296, 1, 0, 0, 0, 1219, 1220, 3, 175, 80, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 141, 30, 0, 1222, 298, 1, 0, 0, 0, 1223, 1224, 4, 142, 8, 0, 1224, 1225, 3, 129, 57, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 142, 23, 0, 1227, 300, 1, 0, 0, 0, 1228, 1229, 4, 143, 9, 0, 1229, 1230, 3, 165, 75, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 143, 24, 0, 1232, 302, 1, 0, 0, 0, 1233, 1234, 3, 55, 20, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 144, 10, 0, 1236, 304, 1, 0, 0, 0, 1237, 1238, 3, 57, 21, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 145, 10, 0, 1240, 306, 1, 0, 0, 0, 1241, 1242, 3, 59, 22, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 146, 10, 0, 1244, 308, 1, 0, 0, 0, 1245, 1246, 3, 63, 24, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 16, 0, 1248, 1249, 6, 147, 11, 0, 1249, 310, 1, 0, 0, 0, 1250, 1251, 3, 105, 45, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 148, 22, 0, 1253, 312, 1, 0, 0, 0, 1254, 1255, 4, 149, 10, 0, 1255, 1256, 3, 129, 57, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 149, 23, 0, 1258, 314, 1, 0, 0, 0, 1259, 1260, 4, 150, 11, 0, 1260, 1261, 3, 165, 75, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 150, 24, 0, 1263, 316, 1, 0, 0, 0, 1264, 1265, 3, 175, 80, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 151, 30, 0, 1267, 318, 1, 0, 0, 0, 1268, 1269, 3, 171, 78, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 152, 31, 0, 1271, 320, 1, 0, 0, 0, 1272, 1273, 3, 55, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 153, 10, 0, 1275, 322, 1, 0, 0, 0, 1276, 1277, 3, 57, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 154, 10, 0, 1279, 324, 1, 0, 0, 0, 1280, 1281, 3, 59, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 155, 10, 0, 1283, 326, 1, 0, 0, 0, 1284, 1285, 3, 63, 24, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 156, 16, 0, 1287, 1288, 6, 156, 11, 0, 1288, 328, 1, 0, 0, 0, 1289, 1290, 7, 1, 0, 0, 1290, 1291, 7, 9, 0, 0, 1291, 1292, 7, 15, 0, 0, 1292, 1293, 7, 7, 0, 0, 1293, 330, 1, 0, 0, 0, 1294, 1295, 3, 55, 20, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 158, 10, 0, 1297, 332, 1, 0, 0, 0, 1298, 1299, 3, 57, 21, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 159, 10, 0, 1301, 334, 1, 0, 0, 0, 1302, 1303, 3, 59, 22, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 160, 10, 0, 1305, 336, 1, 0, 0, 0, 1306, 1307, 3, 169, 77, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 161, 17, 0, 1309, 1310, 6, 161, 11, 0, 1310, 338, 1, 0, 0, 0, 1311, 1312, 3, 61, 23, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 162, 12, 0, 1314, 340, 1, 0, 0, 0, 1315, 1321, 3, 75, 30, 0, 1316, 1321, 3, 65, 25, 0, 1317, 1321, 3, 105, 45, 0, 1318, 1321, 3, 67, 26, 0, 1319, 1321, 3, 81, 33, 0, 
1320, 1315, 1, 0, 0, 0, 1320, 1316, 1, 0, 0, 0, 1320, 1317, 1, 0, 0, 0, 1320, 1318, 1, 0, 0, 0, 1320, 1319, 1, 0, 0, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1320, 1, 0, 0, 0, 1322, 1323, 1, 0, 0, 0, 1323, 342, 1, 0, 0, 0, 1324, 1325, 3, 55, 20, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 164, 10, 0, 1327, 344, 1, 0, 0, 0, 1328, 1329, 3, 57, 21, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 165, 10, 0, 1331, 346, 1, 0, 0, 0, 1332, 1333, 3, 59, 22, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 166, 10, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 63, 24, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 16, 0, 1339, 1340, 6, 167, 11, 0, 1340, 350, 1, 0, 0, 0, 1341, 1342, 3, 61, 23, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 168, 12, 0, 1344, 352, 1, 0, 0, 0, 1345, 1346, 3, 101, 43, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 169, 18, 0, 1348, 354, 1, 0, 0, 0, 1349, 1350, 3, 105, 45, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 170, 22, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 269, 127, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 32, 0, 1356, 1357, 6, 171, 33, 0, 1357, 358, 1, 0, 0, 0, 1358, 1359, 3, 209, 97, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 172, 20, 0, 1361, 360, 1, 0, 0, 0, 1362, 1363, 3, 85, 35, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 173, 21, 0, 1365, 362, 1, 0, 0, 0, 1366, 1367, 3, 55, 20, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 174, 10, 0, 1369, 364, 1, 0, 0, 0, 1370, 1371, 3, 57, 21, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 175, 10, 0, 1373, 366, 1, 0, 0, 0, 1374, 1375, 3, 59, 22, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 176, 10, 0, 1377, 368, 1, 0, 0, 0, 1378, 1379, 3, 63, 24, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 177, 16, 0, 1381, 1382, 6, 177, 11, 0, 1382, 1383, 6, 177, 11, 0, 1383, 370, 1, 0, 0, 0, 1384, 1385, 3, 101, 43, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 178, 18, 0, 1387, 372, 1, 0, 0, 0, 1388, 1389, 3, 105, 45, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 179, 22, 0, 1391, 374, 1, 0, 0, 0, 1392, 1393, 3, 235, 110, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 180, 25, 0, 1395, 376, 1, 0, 0, 0, 1396, 1397, 3, 55, 20, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 181, 10, 0, 1399, 378, 1, 0, 0, 0, 1400, 1401, 3, 57, 21, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 182, 10, 0, 1403, 380, 1, 0, 0, 0, 1404, 1405, 3, 59, 22, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 183, 10, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 63, 24, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 16, 0, 1411, 1412, 6, 184, 11, 0, 1412, 384, 1, 0, 0, 0, 1413, 1414, 3, 209, 97, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 185, 20, 0, 1416, 1417, 6, 185, 11, 0, 1417, 1418, 6, 185, 34, 0, 1418, 386, 1, 0, 0, 0, 1419, 1420, 3, 85, 35, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 186, 21, 0, 1422, 1423, 6, 186, 11, 0, 1423, 1424, 6, 186, 34, 0, 1424, 388, 1, 0, 0, 0, 1425, 1426, 3, 55, 20, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 187, 10, 0, 1428, 390, 1, 0, 0, 0, 1429, 1430, 3, 57, 21, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 188, 10, 0, 1432, 392, 1, 0, 0, 0, 1433, 1434, 3, 59, 22, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 189, 10, 0, 1436, 394, 1, 0, 0, 0, 1437, 1438, 3, 61, 23, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 190, 12, 0, 1440, 1441, 6, 190, 11, 0, 1441, 1442, 6, 190, 9, 0, 1442, 396, 1, 0, 0, 0, 1443, 1444, 3, 101, 43, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 191, 18, 0, 1446, 1447, 6, 191, 11, 0, 1447, 1448, 6, 191, 9, 0, 1448, 398, 1, 0, 0, 0, 1449, 1450, 3, 55, 20, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 192, 10, 0, 1452, 400, 1, 0, 0, 0, 1453, 1454, 3, 57, 21, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 193, 10, 0, 
1456, 402, 1, 0, 0, 0, 1457, 1458, 3, 59, 22, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 194, 10, 0, 1460, 404, 1, 0, 0, 0, 1461, 1462, 3, 175, 80, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 195, 11, 0, 1464, 1465, 6, 195, 0, 0, 1465, 1466, 6, 195, 30, 0, 1466, 406, 1, 0, 0, 0, 1467, 1468, 3, 171, 78, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 196, 11, 0, 1470, 1471, 6, 196, 0, 0, 1471, 1472, 6, 196, 31, 0, 1472, 408, 1, 0, 0, 0, 1473, 1474, 3, 91, 38, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 197, 11, 0, 1476, 1477, 6, 197, 0, 0, 1477, 1478, 6, 197, 35, 0, 1478, 410, 1, 0, 0, 0, 1479, 1480, 3, 63, 24, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 198, 16, 0, 1482, 1483, 6, 198, 11, 0, 1483, 412, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 581, 591, 595, 598, 607, 609, 620, 641, 646, 655, 662, 667, 669, 680, 688, 691, 693, 698, 703, 709, 716, 721, 727, 730, 738, 742, 873, 878, 885, 887, 903, 908, 913, 915, 921, 998, 1003, 1052, 1056, 1061, 1066, 1071, 1073, 1077, 1079, 1166, 1170, 1175, 1320, 1322, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 24, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 48, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file +[4, 0, 128, 1601, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 
139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 654, 8, 24, 11, 24, 12, 24, 655, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 664, 8, 25, 10, 25, 12, 25, 667, 9, 25, 1, 25, 3, 25, 670, 8, 25, 1, 25, 3, 25, 673, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 682, 8, 26, 10, 26, 12, 26, 685, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 693, 8, 27, 11, 27, 12, 27, 694, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 714, 8, 33, 1, 33, 4, 33, 717, 8, 33, 11, 33, 12, 33, 718, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 728, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 735, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 740, 8, 39, 10, 39, 12, 39, 743, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 751, 8, 39, 10, 39, 12, 39, 754, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 761, 8, 39, 1, 39, 3, 39, 764, 8, 39, 3, 39, 766, 8, 39, 1, 40, 4, 40, 769, 8, 40, 11, 40, 12, 40, 770, 1, 41, 4, 41, 774, 
8, 41, 11, 41, 12, 41, 775, 1, 41, 1, 41, 5, 41, 780, 8, 41, 10, 41, 12, 41, 783, 9, 41, 1, 41, 1, 41, 4, 41, 787, 8, 41, 11, 41, 12, 41, 788, 1, 41, 4, 41, 792, 8, 41, 11, 41, 12, 41, 793, 1, 41, 1, 41, 5, 41, 798, 8, 41, 10, 41, 12, 41, 801, 9, 41, 3, 41, 803, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 809, 8, 41, 11, 41, 12, 41, 810, 1, 41, 1, 41, 3, 41, 815, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 3, 79, 943, 8, 79, 1, 79, 5, 79, 946, 8, 79, 10, 79, 12, 79, 949, 9, 79, 1, 79, 1, 79, 4, 79, 953, 8, 79, 11, 79, 12, 79, 954, 3, 79, 957, 8, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 5, 82, 971, 8, 82, 10, 82, 12, 82, 974, 9, 82, 1, 82, 1, 82, 3, 82, 978, 8, 82, 1, 82, 4, 82, 981, 8, 82, 11, 82, 12, 82, 982, 3, 82, 985, 8, 82, 1, 83, 1, 83, 4, 83, 989, 8, 83, 11, 83, 12, 83, 990, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 3, 100, 1068, 8, 100, 1, 101, 4, 101, 1071, 8, 101, 11, 101, 12, 101, 1072, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 1122, 8, 112, 1, 113, 1, 113, 3, 113, 1126, 8, 113, 1, 113, 5, 113, 1129, 8, 113, 10, 113, 12, 113, 1132, 9, 113, 1, 113, 1, 113, 3, 113, 1136, 8, 113, 1, 113, 4, 113, 1139, 8, 113, 11, 113, 12, 113, 1140, 3, 113, 1143, 8, 113, 1, 114, 1, 114, 4, 114, 1147, 8, 114, 11, 114, 12, 114, 1148, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 
1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 4, 134, 1234, 8, 134, 11, 134, 12, 134, 1235, 1, 134, 1, 134, 3, 134, 1240, 8, 134, 1, 134, 4, 134, 1243, 8, 134, 11, 134, 12, 134, 1244, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 4, 167, 1390, 8, 167, 11, 167, 12, 167, 1391, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 2, 683, 752, 0, 213, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 
134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 69, 176, 70, 178, 71, 180, 72, 182, 0, 184, 73, 186, 74, 188, 75, 190, 76, 192, 0, 194, 0, 196, 77, 198, 78, 200, 79, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 80, 216, 0, 218, 81, 220, 0, 222, 0, 224, 82, 226, 83, 228, 84, 230, 0, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 88, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 89, 266, 0, 268, 90, 270, 91, 272, 92, 274, 0, 276, 0, 278, 93, 280, 94, 282, 0, 284, 95, 286, 0, 288, 96, 290, 97, 292, 98, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 99, 314, 100, 316, 101, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 102, 332, 103, 334, 104, 336, 0, 338, 105, 340, 106, 342, 107, 344, 108, 346, 0, 348, 0, 350, 109, 352, 110, 354, 111, 356, 112, 358, 0, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 113, 374, 114, 376, 115, 378, 0, 380, 0, 382, 0, 384, 0, 386, 116, 388, 117, 390, 118, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 0, 404, 0, 406, 120, 408, 121, 410, 122, 412, 0, 414, 0, 416, 0, 418, 123, 420, 124, 422, 125, 424, 0, 426, 0, 428, 126, 430, 127, 432, 128, 434, 0, 436, 0, 438, 0, 440, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1628, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 
146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 244, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 8, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 9, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 10, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 11, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 12, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 13, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 14, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 15, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 16, 442, 1, 0, 0, 0, 18, 452, 1, 0, 0, 0, 20, 459, 1, 0, 0, 0, 22, 468, 1, 0, 0, 0, 24, 475, 1, 0, 0, 0, 26, 485, 1, 0, 0, 0, 28, 492, 1, 0, 0, 0, 30, 499, 1, 0, 0, 0, 32, 506, 1, 0, 0, 0, 34, 514, 1, 0, 0, 0, 36, 526, 1, 0, 0, 0, 38, 535, 1, 0, 0, 0, 40, 541, 1, 0, 0, 0, 42, 548, 1, 0, 0, 0, 44, 555, 1, 0, 0, 0, 46, 563, 1, 0, 0, 0, 48, 571, 1, 0, 0, 0, 50, 586, 1, 0, 0, 0, 52, 598, 1, 0, 0, 0, 54, 609, 1, 0, 0, 0, 56, 617, 1, 0, 0, 0, 58, 625, 1, 0, 0, 0, 60, 633, 1, 0, 0, 0, 62, 642, 1, 0, 0, 0, 64, 653, 1, 0, 0, 0, 66, 659, 1, 0, 0, 0, 68, 676, 1, 0, 0, 0, 70, 692, 1, 0, 0, 0, 72, 698, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 704, 1, 0, 0, 
0, 78, 706, 1, 0, 0, 0, 80, 709, 1, 0, 0, 0, 82, 711, 1, 0, 0, 0, 84, 720, 1, 0, 0, 0, 86, 722, 1, 0, 0, 0, 88, 727, 1, 0, 0, 0, 90, 729, 1, 0, 0, 0, 92, 734, 1, 0, 0, 0, 94, 765, 1, 0, 0, 0, 96, 768, 1, 0, 0, 0, 98, 814, 1, 0, 0, 0, 100, 816, 1, 0, 0, 0, 102, 819, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 829, 1, 0, 0, 0, 110, 832, 1, 0, 0, 0, 112, 834, 1, 0, 0, 0, 114, 836, 1, 0, 0, 0, 116, 841, 1, 0, 0, 0, 118, 843, 1, 0, 0, 0, 120, 849, 1, 0, 0, 0, 122, 855, 1, 0, 0, 0, 124, 858, 1, 0, 0, 0, 126, 861, 1, 0, 0, 0, 128, 866, 1, 0, 0, 0, 130, 871, 1, 0, 0, 0, 132, 873, 1, 0, 0, 0, 134, 877, 1, 0, 0, 0, 136, 882, 1, 0, 0, 0, 138, 888, 1, 0, 0, 0, 140, 891, 1, 0, 0, 0, 142, 893, 1, 0, 0, 0, 144, 899, 1, 0, 0, 0, 146, 901, 1, 0, 0, 0, 148, 906, 1, 0, 0, 0, 150, 909, 1, 0, 0, 0, 152, 912, 1, 0, 0, 0, 154, 915, 1, 0, 0, 0, 156, 917, 1, 0, 0, 0, 158, 920, 1, 0, 0, 0, 160, 922, 1, 0, 0, 0, 162, 925, 1, 0, 0, 0, 164, 927, 1, 0, 0, 0, 166, 929, 1, 0, 0, 0, 168, 931, 1, 0, 0, 0, 170, 933, 1, 0, 0, 0, 172, 935, 1, 0, 0, 0, 174, 956, 1, 0, 0, 0, 176, 958, 1, 0, 0, 0, 178, 963, 1, 0, 0, 0, 180, 984, 1, 0, 0, 0, 182, 986, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1000, 1, 0, 0, 0, 190, 1004, 1, 0, 0, 0, 192, 1008, 1, 0, 0, 0, 194, 1013, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1022, 1, 0, 0, 0, 200, 1026, 1, 0, 0, 0, 202, 1030, 1, 0, 0, 0, 204, 1035, 1, 0, 0, 0, 206, 1039, 1, 0, 0, 0, 208, 1043, 1, 0, 0, 0, 210, 1047, 1, 0, 0, 0, 212, 1051, 1, 0, 0, 0, 214, 1055, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1074, 1, 0, 0, 0, 222, 1078, 1, 0, 0, 0, 224, 1082, 1, 0, 0, 0, 226, 1086, 1, 0, 0, 0, 228, 1090, 1, 0, 0, 0, 230, 1094, 1, 0, 0, 0, 232, 1099, 1, 0, 0, 0, 234, 1103, 1, 0, 0, 0, 236, 1107, 1, 0, 0, 0, 238, 1112, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1142, 1, 0, 0, 0, 244, 1146, 1, 0, 0, 0, 246, 1150, 1, 0, 0, 0, 248, 1154, 1, 0, 0, 0, 250, 1158, 1, 0, 0, 0, 252, 1162, 1, 0, 0, 0, 254, 1167, 1, 0, 0, 0, 256, 1171, 1, 0, 0, 0, 258, 1175, 1, 0, 0, 0, 260, 1179, 1, 0, 0, 0, 262, 1184, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1196, 1, 0, 0, 0, 270, 1200, 1, 0, 0, 0, 272, 1204, 1, 0, 0, 0, 274, 1208, 1, 0, 0, 0, 276, 1213, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1250, 1, 0, 0, 0, 290, 1254, 1, 0, 0, 0, 292, 1258, 1, 0, 0, 0, 294, 1262, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1276, 1, 0, 0, 0, 302, 1280, 1, 0, 0, 0, 304, 1284, 1, 0, 0, 0, 306, 1288, 1, 0, 0, 0, 308, 1292, 1, 0, 0, 0, 310, 1297, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1306, 1, 0, 0, 0, 316, 1310, 1, 0, 0, 0, 318, 1314, 1, 0, 0, 0, 320, 1319, 1, 0, 0, 0, 322, 1323, 1, 0, 0, 0, 324, 1328, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1337, 1, 0, 0, 0, 330, 1341, 1, 0, 0, 0, 332, 1345, 1, 0, 0, 0, 334, 1349, 1, 0, 0, 0, 336, 1353, 1, 0, 0, 0, 338, 1358, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1367, 1, 0, 0, 0, 344, 1371, 1, 0, 0, 0, 346, 1375, 1, 0, 0, 0, 348, 1380, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1393, 1, 0, 0, 0, 354, 1397, 1, 0, 0, 0, 356, 1401, 1, 0, 0, 0, 358, 1405, 1, 0, 0, 0, 360, 1410, 1, 0, 0, 0, 362, 1414, 1, 0, 0, 0, 364, 1418, 1, 0, 0, 0, 366, 1422, 1, 0, 0, 0, 368, 1427, 1, 0, 0, 0, 370, 1431, 1, 0, 0, 0, 372, 1435, 1, 0, 0, 0, 374, 1439, 1, 0, 0, 0, 376, 1443, 1, 0, 0, 0, 378, 1447, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1461, 1, 0, 0, 0, 386, 1465, 1, 0, 0, 0, 388, 1469, 1, 0, 0, 0, 390, 1473, 1, 0, 0, 0, 392, 
1477, 1, 0, 0, 0, 394, 1482, 1, 0, 0, 0, 396, 1486, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1505, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 1530, 1, 0, 0, 0, 416, 1536, 1, 0, 0, 0, 418, 1542, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1550, 1, 0, 0, 0, 424, 1554, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1566, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1574, 1, 0, 0, 0, 434, 1578, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1590, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 443, 7, 0, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 2, 0, 0, 445, 446, 7, 2, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 4, 0, 0, 448, 449, 7, 5, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 0, 0, 0, 451, 17, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 1, 1, 0, 458, 19, 1, 0, 0, 0, 459, 460, 7, 3, 0, 0, 460, 461, 7, 9, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 4, 0, 0, 464, 465, 7, 10, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 2, 2, 0, 467, 21, 1, 0, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 11, 0, 0, 470, 471, 7, 12, 0, 0, 471, 472, 7, 13, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 3, 0, 0, 474, 23, 1, 0, 0, 0, 475, 476, 7, 3, 0, 0, 476, 477, 7, 14, 0, 0, 477, 478, 7, 8, 0, 0, 478, 479, 7, 13, 0, 0, 479, 480, 7, 12, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 4, 3, 0, 484, 25, 1, 0, 0, 0, 485, 486, 7, 15, 0, 0, 486, 487, 7, 6, 0, 0, 487, 488, 7, 7, 0, 0, 488, 489, 7, 16, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 5, 4, 0, 491, 27, 1, 0, 0, 0, 492, 493, 7, 17, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 7, 0, 0, 495, 496, 7, 18, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 6, 0, 0, 498, 29, 1, 0, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 3, 0, 0, 502, 503, 7, 8, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 7, 1, 0, 505, 31, 1, 0, 0, 0, 506, 507, 7, 13, 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 16, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 5, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 8, 0, 0, 513, 33, 1, 0, 0, 0, 514, 515, 7, 16, 0, 0, 515, 516, 7, 11, 0, 0, 516, 517, 5, 95, 0, 0, 517, 518, 7, 3, 0, 0, 518, 519, 7, 14, 0, 0, 519, 520, 7, 8, 0, 0, 520, 521, 7, 12, 0, 0, 521, 522, 7, 9, 0, 0, 522, 523, 7, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 9, 5, 0, 525, 35, 1, 0, 0, 0, 526, 527, 7, 6, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 9, 0, 0, 529, 530, 7, 12, 0, 0, 530, 531, 7, 16, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 10, 6, 0, 534, 37, 1, 0, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 7, 0, 0, 537, 538, 7, 19, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 6, 11, 0, 0, 540, 39, 1, 0, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, 7, 10, 0, 0, 543, 544, 7, 7, 0, 0, 544, 545, 7, 19, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 12, 7, 0, 547, 41, 1, 0, 0, 0, 548, 549, 7, 2, 0, 0, 549, 550, 7, 7, 0, 0, 550, 551, 7, 6, 0, 0, 551, 552, 7, 5, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 13, 0, 0, 554, 43, 1, 0, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 7, 5, 0, 0, 557, 558, 7, 12, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 14, 0, 0, 562, 45, 1, 0, 0, 0, 563, 564, 7, 19, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 15, 0, 0, 570, 47, 1, 0, 0, 0, 571, 572, 4, 16, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 9, 0, 0, 574, 575, 7, 13, 0, 0, 575, 576, 7, 1, 0, 0, 576, 577, 7, 9, 
0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 7, 2, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 12, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 16, 0, 0, 585, 49, 1, 0, 0, 0, 586, 587, 4, 17, 1, 0, 587, 588, 7, 13, 0, 0, 588, 589, 7, 7, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 18, 0, 0, 591, 592, 7, 20, 0, 0, 592, 593, 7, 8, 0, 0, 593, 594, 5, 95, 0, 0, 594, 595, 5, 128020, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 17, 8, 0, 597, 51, 1, 0, 0, 0, 598, 599, 4, 18, 2, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 3, 0, 0, 601, 602, 7, 5, 0, 0, 602, 603, 7, 6, 0, 0, 603, 604, 7, 1, 0, 0, 604, 605, 7, 4, 0, 0, 605, 606, 7, 2, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 18, 9, 0, 608, 53, 1, 0, 0, 0, 609, 610, 4, 19, 3, 0, 610, 611, 7, 21, 0, 0, 611, 612, 7, 7, 0, 0, 612, 613, 7, 1, 0, 0, 613, 614, 7, 9, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 19, 10, 0, 616, 55, 1, 0, 0, 0, 617, 618, 4, 20, 4, 0, 618, 619, 7, 15, 0, 0, 619, 620, 7, 20, 0, 0, 620, 621, 7, 13, 0, 0, 621, 622, 7, 13, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 20, 10, 0, 624, 57, 1, 0, 0, 0, 625, 626, 4, 21, 5, 0, 626, 627, 7, 13, 0, 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 5, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 21, 10, 0, 632, 59, 1, 0, 0, 0, 633, 634, 4, 22, 6, 0, 634, 635, 7, 6, 0, 0, 635, 636, 7, 1, 0, 0, 636, 637, 7, 17, 0, 0, 637, 638, 7, 10, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 22, 10, 0, 641, 61, 1, 0, 0, 0, 642, 643, 4, 23, 7, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 23, 10, 0, 651, 63, 1, 0, 0, 0, 652, 654, 8, 22, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 24, 0, 0, 658, 65, 1, 0, 0, 0, 659, 660, 5, 47, 0, 0, 660, 661, 5, 47, 0, 0, 661, 665, 1, 0, 0, 0, 662, 664, 8, 23, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 670, 5, 13, 0, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 5, 10, 0, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 25, 11, 0, 675, 67, 1, 0, 0, 0, 676, 677, 5, 47, 0, 0, 677, 678, 5, 42, 0, 0, 678, 683, 1, 0, 0, 0, 679, 682, 3, 68, 26, 0, 680, 682, 9, 0, 0, 0, 681, 679, 1, 0, 0, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 684, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 687, 5, 42, 0, 0, 687, 688, 5, 47, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 26, 11, 0, 690, 69, 1, 0, 0, 0, 691, 693, 7, 24, 0, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 697, 6, 27, 11, 0, 697, 71, 1, 0, 0, 0, 698, 699, 5, 124, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 28, 12, 0, 701, 73, 1, 0, 0, 0, 702, 703, 7, 25, 0, 0, 703, 75, 1, 0, 0, 0, 704, 705, 7, 26, 0, 0, 705, 77, 1, 0, 0, 0, 706, 707, 5, 92, 0, 0, 707, 708, 7, 27, 0, 0, 708, 79, 1, 0, 0, 0, 709, 710, 8, 28, 0, 0, 710, 81, 1, 0, 0, 0, 711, 713, 7, 3, 0, 0, 712, 714, 7, 29, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 3, 74, 29, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 83, 1, 0, 0, 0, 720, 721, 5, 64, 0, 0, 721, 85, 1, 0, 0, 0, 722, 723, 5, 96, 0, 0, 723, 87, 1, 0, 0, 0, 724, 728, 8, 30, 0, 0, 725, 726, 5, 96, 0, 0, 726, 728, 5, 
96, 0, 0, 727, 724, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 89, 1, 0, 0, 0, 729, 730, 5, 95, 0, 0, 730, 91, 1, 0, 0, 0, 731, 735, 3, 76, 30, 0, 732, 735, 3, 74, 29, 0, 733, 735, 3, 90, 37, 0, 734, 731, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 733, 1, 0, 0, 0, 735, 93, 1, 0, 0, 0, 736, 741, 5, 34, 0, 0, 737, 740, 3, 78, 31, 0, 738, 740, 3, 80, 32, 0, 739, 737, 1, 0, 0, 0, 739, 738, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 744, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 744, 766, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 748, 5, 34, 0, 0, 748, 752, 1, 0, 0, 0, 749, 751, 8, 23, 0, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 753, 755, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 760, 1, 0, 0, 0, 759, 761, 5, 34, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 764, 5, 34, 0, 0, 763, 762, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 736, 1, 0, 0, 0, 765, 745, 1, 0, 0, 0, 766, 95, 1, 0, 0, 0, 767, 769, 3, 74, 29, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 97, 1, 0, 0, 0, 772, 774, 3, 74, 29, 0, 773, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 781, 3, 116, 50, 0, 778, 780, 3, 74, 29, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 815, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 786, 3, 116, 50, 0, 785, 787, 3, 74, 29, 0, 786, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 815, 1, 0, 0, 0, 790, 792, 3, 74, 29, 0, 791, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 802, 1, 0, 0, 0, 795, 799, 3, 116, 50, 0, 796, 798, 3, 74, 29, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 3, 82, 33, 0, 805, 815, 1, 0, 0, 0, 806, 808, 3, 116, 50, 0, 807, 809, 3, 74, 29, 0, 808, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 3, 82, 33, 0, 813, 815, 1, 0, 0, 0, 814, 773, 1, 0, 0, 0, 814, 784, 1, 0, 0, 0, 814, 791, 1, 0, 0, 0, 814, 806, 1, 0, 0, 0, 815, 99, 1, 0, 0, 0, 816, 817, 7, 31, 0, 0, 817, 818, 7, 32, 0, 0, 818, 101, 1, 0, 0, 0, 819, 820, 7, 12, 0, 0, 820, 821, 7, 9, 0, 0, 821, 822, 7, 0, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 4, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 5, 61, 0, 0, 828, 107, 1, 0, 0, 0, 829, 830, 5, 58, 0, 0, 830, 831, 5, 58, 0, 0, 831, 109, 1, 0, 0, 0, 832, 833, 5, 58, 0, 0, 833, 111, 1, 0, 0, 0, 834, 835, 5, 44, 0, 0, 835, 113, 1, 0, 0, 0, 836, 837, 7, 0, 0, 0, 837, 838, 7, 3, 0, 0, 838, 839, 7, 2, 0, 0, 839, 840, 7, 4, 0, 0, 840, 115, 1, 0, 0, 0, 841, 842, 5, 46, 0, 0, 842, 117, 1, 0, 0, 0, 843, 844, 7, 15, 0, 0, 844, 845, 7, 12, 0, 0, 845, 846, 7, 13, 0, 0, 846, 847, 7, 2, 0, 0, 847, 848, 7, 3, 0, 0, 848, 119, 1, 0, 0, 0, 849, 850, 7, 15, 0, 0, 850, 851, 7, 1, 0, 0, 851, 852, 7, 6, 0, 0, 852, 853, 7, 2, 0, 0, 853, 854, 7, 5, 0, 0, 854, 121, 1, 0, 0, 0, 855, 856, 7, 1, 0, 0, 856, 857, 7, 9, 0, 0, 857, 123, 1, 0, 0, 0, 858, 859, 7, 1, 0, 0, 859, 860, 7, 2, 0, 0, 860, 125, 1, 0, 0, 0, 861, 862, 7, 13, 0, 0, 862, 863, 7, 12, 0, 0, 863, 864, 7, 2, 0, 0, 864, 865, 7, 5, 0, 0, 865, 
127, 1, 0, 0, 0, 866, 867, 7, 13, 0, 0, 867, 868, 7, 1, 0, 0, 868, 869, 7, 18, 0, 0, 869, 870, 7, 3, 0, 0, 870, 129, 1, 0, 0, 0, 871, 872, 5, 40, 0, 0, 872, 131, 1, 0, 0, 0, 873, 874, 7, 9, 0, 0, 874, 875, 7, 7, 0, 0, 875, 876, 7, 5, 0, 0, 876, 133, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 20, 0, 0, 879, 880, 7, 13, 0, 0, 880, 881, 7, 13, 0, 0, 881, 135, 1, 0, 0, 0, 882, 883, 7, 9, 0, 0, 883, 884, 7, 20, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 2, 0, 0, 887, 137, 1, 0, 0, 0, 888, 889, 7, 7, 0, 0, 889, 890, 7, 6, 0, 0, 890, 139, 1, 0, 0, 0, 891, 892, 5, 63, 0, 0, 892, 141, 1, 0, 0, 0, 893, 894, 7, 6, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 1, 0, 0, 896, 897, 7, 18, 0, 0, 897, 898, 7, 3, 0, 0, 898, 143, 1, 0, 0, 0, 899, 900, 5, 41, 0, 0, 900, 145, 1, 0, 0, 0, 901, 902, 7, 5, 0, 0, 902, 903, 7, 6, 0, 0, 903, 904, 7, 20, 0, 0, 904, 905, 7, 3, 0, 0, 905, 147, 1, 0, 0, 0, 906, 907, 5, 61, 0, 0, 907, 908, 5, 61, 0, 0, 908, 149, 1, 0, 0, 0, 909, 910, 5, 61, 0, 0, 910, 911, 5, 126, 0, 0, 911, 151, 1, 0, 0, 0, 912, 913, 5, 33, 0, 0, 913, 914, 5, 61, 0, 0, 914, 153, 1, 0, 0, 0, 915, 916, 5, 60, 0, 0, 916, 155, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 919, 5, 61, 0, 0, 919, 157, 1, 0, 0, 0, 920, 921, 5, 62, 0, 0, 921, 159, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 924, 5, 61, 0, 0, 924, 161, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 163, 1, 0, 0, 0, 927, 928, 5, 45, 0, 0, 928, 165, 1, 0, 0, 0, 929, 930, 5, 42, 0, 0, 930, 167, 1, 0, 0, 0, 931, 932, 5, 47, 0, 0, 932, 169, 1, 0, 0, 0, 933, 934, 5, 37, 0, 0, 934, 171, 1, 0, 0, 0, 935, 936, 3, 46, 15, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 78, 13, 0, 938, 173, 1, 0, 0, 0, 939, 942, 3, 140, 62, 0, 940, 943, 3, 76, 30, 0, 941, 943, 3, 90, 37, 0, 942, 940, 1, 0, 0, 0, 942, 941, 1, 0, 0, 0, 943, 947, 1, 0, 0, 0, 944, 946, 3, 92, 38, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 957, 1, 0, 0, 0, 949, 947, 1, 0, 0, 0, 950, 952, 3, 140, 62, 0, 951, 953, 3, 74, 29, 0, 952, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 957, 1, 0, 0, 0, 956, 939, 1, 0, 0, 0, 956, 950, 1, 0, 0, 0, 957, 175, 1, 0, 0, 0, 958, 959, 5, 91, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 80, 0, 0, 961, 962, 6, 80, 0, 0, 962, 177, 1, 0, 0, 0, 963, 964, 5, 93, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 81, 12, 0, 966, 967, 6, 81, 12, 0, 967, 179, 1, 0, 0, 0, 968, 972, 3, 76, 30, 0, 969, 971, 3, 92, 38, 0, 970, 969, 1, 0, 0, 0, 971, 974, 1, 0, 0, 0, 972, 970, 1, 0, 0, 0, 972, 973, 1, 0, 0, 0, 973, 985, 1, 0, 0, 0, 974, 972, 1, 0, 0, 0, 975, 978, 3, 90, 37, 0, 976, 978, 3, 84, 34, 0, 977, 975, 1, 0, 0, 0, 977, 976, 1, 0, 0, 0, 978, 980, 1, 0, 0, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 968, 1, 0, 0, 0, 984, 977, 1, 0, 0, 0, 985, 181, 1, 0, 0, 0, 986, 988, 3, 86, 35, 0, 987, 989, 3, 88, 36, 0, 988, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 3, 86, 35, 0, 993, 183, 1, 0, 0, 0, 994, 995, 3, 182, 83, 0, 995, 185, 1, 0, 0, 0, 996, 997, 3, 66, 25, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 85, 11, 0, 999, 187, 1, 0, 0, 0, 1000, 1001, 3, 68, 26, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 86, 11, 0, 1003, 189, 1, 0, 0, 0, 1004, 1005, 3, 70, 27, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 87, 11, 0, 1007, 191, 1, 0, 0, 0, 1008, 1009, 3, 176, 80, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 88, 14, 0, 1011, 1012, 6, 88, 15, 0, 1012, 
193, 1, 0, 0, 0, 1013, 1014, 3, 72, 28, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 89, 16, 0, 1016, 1017, 6, 89, 12, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 11, 0, 1021, 197, 1, 0, 0, 0, 1022, 1023, 3, 66, 25, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 91, 11, 0, 1025, 199, 1, 0, 0, 0, 1026, 1027, 3, 68, 26, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 92, 11, 0, 1029, 201, 1, 0, 0, 0, 1030, 1031, 3, 72, 28, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 93, 16, 0, 1033, 1034, 6, 93, 12, 0, 1034, 203, 1, 0, 0, 0, 1035, 1036, 3, 176, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 94, 14, 0, 1038, 205, 1, 0, 0, 0, 1039, 1040, 3, 178, 81, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 95, 17, 0, 1042, 207, 1, 0, 0, 0, 1043, 1044, 3, 110, 47, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 96, 18, 0, 1046, 209, 1, 0, 0, 0, 1047, 1048, 3, 112, 48, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 97, 19, 0, 1050, 211, 1, 0, 0, 0, 1051, 1052, 3, 106, 45, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 98, 20, 0, 1054, 213, 1, 0, 0, 0, 1055, 1056, 7, 16, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 7, 5, 0, 0, 1058, 1059, 7, 12, 0, 0, 1059, 1060, 7, 0, 0, 0, 1060, 1061, 7, 12, 0, 0, 1061, 1062, 7, 5, 0, 0, 1062, 1063, 7, 12, 0, 0, 1063, 215, 1, 0, 0, 0, 1064, 1068, 8, 33, 0, 0, 1065, 1066, 5, 47, 0, 0, 1066, 1068, 8, 34, 0, 0, 1067, 1064, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 217, 1, 0, 0, 0, 1069, 1071, 3, 216, 100, 0, 1070, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1075, 3, 218, 101, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 102, 21, 0, 1077, 221, 1, 0, 0, 0, 1078, 1079, 3, 94, 39, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 103, 22, 0, 1081, 223, 1, 0, 0, 0, 1082, 1083, 3, 66, 25, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 104, 11, 0, 1085, 225, 1, 0, 0, 0, 1086, 1087, 3, 68, 26, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 105, 11, 0, 1089, 227, 1, 0, 0, 0, 1090, 1091, 3, 70, 27, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 106, 11, 0, 1093, 229, 1, 0, 0, 0, 1094, 1095, 3, 72, 28, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 107, 16, 0, 1097, 1098, 6, 107, 12, 0, 1098, 231, 1, 0, 0, 0, 1099, 1100, 3, 116, 50, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 108, 23, 0, 1102, 233, 1, 0, 0, 0, 1103, 1104, 3, 112, 48, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 109, 19, 0, 1106, 235, 1, 0, 0, 0, 1107, 1108, 4, 110, 8, 0, 1108, 1109, 3, 140, 62, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 110, 24, 0, 1111, 237, 1, 0, 0, 0, 1112, 1113, 4, 111, 9, 0, 1113, 1114, 3, 174, 79, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 25, 0, 1116, 239, 1, 0, 0, 0, 1117, 1122, 3, 76, 30, 0, 1118, 1122, 3, 74, 29, 0, 1119, 1122, 3, 90, 37, 0, 1120, 1122, 3, 166, 75, 0, 1121, 1117, 1, 0, 0, 0, 1121, 1118, 1, 0, 0, 0, 1121, 1119, 1, 0, 0, 0, 1121, 1120, 1, 0, 0, 0, 1122, 241, 1, 0, 0, 0, 1123, 1126, 3, 76, 30, 0, 1124, 1126, 3, 166, 75, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1130, 1, 0, 0, 0, 1127, 1129, 3, 240, 112, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1132, 1, 0, 0, 0, 1130, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1143, 1, 0, 0, 0, 1132, 1130, 1, 0, 0, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 84, 34, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 240, 112, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1143, 1, 0, 0, 0, 1142, 1125, 1, 0, 0, 0, 1142, 1135, 1, 0, 0, 0, 1143, 243, 1, 0, 0, 0, 1144, 1147, 3, 242, 113, 0, 
1145, 1147, 3, 182, 83, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 245, 1, 0, 0, 0, 1150, 1151, 3, 66, 25, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 115, 11, 0, 1153, 247, 1, 0, 0, 0, 1154, 1155, 3, 68, 26, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 116, 11, 0, 1157, 249, 1, 0, 0, 0, 1158, 1159, 3, 70, 27, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 117, 11, 0, 1161, 251, 1, 0, 0, 0, 1162, 1163, 3, 72, 28, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 118, 16, 0, 1165, 1166, 6, 118, 12, 0, 1166, 253, 1, 0, 0, 0, 1167, 1168, 3, 106, 45, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 119, 20, 0, 1170, 255, 1, 0, 0, 0, 1171, 1172, 3, 112, 48, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 120, 19, 0, 1174, 257, 1, 0, 0, 0, 1175, 1176, 3, 116, 50, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 121, 23, 0, 1178, 259, 1, 0, 0, 0, 1179, 1180, 4, 122, 10, 0, 1180, 1181, 3, 140, 62, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 122, 24, 0, 1183, 261, 1, 0, 0, 0, 1184, 1185, 4, 123, 11, 0, 1185, 1186, 3, 174, 79, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 25, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 1191, 7, 2, 0, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 3, 244, 114, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 125, 26, 0, 1195, 267, 1, 0, 0, 0, 1196, 1197, 3, 66, 25, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 126, 11, 0, 1199, 269, 1, 0, 0, 0, 1200, 1201, 3, 68, 26, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 127, 11, 0, 1203, 271, 1, 0, 0, 0, 1204, 1205, 3, 70, 27, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 128, 11, 0, 1207, 273, 1, 0, 0, 0, 1208, 1209, 3, 72, 28, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 129, 16, 0, 1211, 1212, 6, 129, 12, 0, 1212, 275, 1, 0, 0, 0, 1213, 1214, 3, 176, 80, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 130, 14, 0, 1216, 1217, 6, 130, 27, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 7, 7, 0, 0, 1219, 1220, 7, 9, 0, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 131, 28, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 7, 19, 0, 0, 1224, 1225, 7, 1, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 10, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 8, 35, 0, 0, 1231, 283, 1, 0, 0, 0, 1232, 1234, 3, 282, 133, 0, 1233, 1232, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1233, 1, 0, 0, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 3, 110, 47, 0, 1238, 1240, 1, 0, 0, 0, 1239, 1233, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1242, 1, 0, 0, 0, 1241, 1243, 3, 282, 133, 0, 1242, 1241, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 3, 284, 134, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 135, 29, 0, 1249, 287, 1, 0, 0, 0, 1250, 1251, 3, 66, 25, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 136, 11, 0, 1253, 289, 1, 0, 0, 0, 1254, 1255, 3, 68, 26, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 137, 11, 0, 1257, 291, 1, 0, 0, 0, 1258, 1259, 3, 70, 27, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 138, 11, 0, 1261, 293, 1, 0, 0, 0, 1262, 1263, 3, 72, 28, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 139, 16, 0, 1265, 1266, 6, 139, 12, 0, 1266, 1267, 6, 139, 12, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 106, 45, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 20, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 112, 48, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 19, 0, 1275, 299, 1, 0, 0, 0, 1276, 1277, 3, 116, 50, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 142, 23, 0, 1279, 301, 1, 0, 0, 0, 1280, 1281, 3, 280, 132, 0, 1281, 1282, 1, 0, 0, 
0, 1282, 1283, 6, 143, 30, 0, 1283, 303, 1, 0, 0, 0, 1284, 1285, 3, 244, 114, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 144, 26, 0, 1287, 305, 1, 0, 0, 0, 1288, 1289, 3, 184, 84, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 145, 31, 0, 1291, 307, 1, 0, 0, 0, 1292, 1293, 4, 146, 12, 0, 1293, 1294, 3, 140, 62, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 146, 24, 0, 1296, 309, 1, 0, 0, 0, 1297, 1298, 4, 147, 13, 0, 1298, 1299, 3, 174, 79, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 25, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 3, 66, 25, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 148, 11, 0, 1305, 313, 1, 0, 0, 0, 1306, 1307, 3, 68, 26, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 149, 11, 0, 1309, 315, 1, 0, 0, 0, 1310, 1311, 3, 70, 27, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 150, 11, 0, 1313, 317, 1, 0, 0, 0, 1314, 1315, 3, 72, 28, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 151, 16, 0, 1317, 1318, 6, 151, 12, 0, 1318, 319, 1, 0, 0, 0, 1319, 1320, 3, 116, 50, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 152, 23, 0, 1322, 321, 1, 0, 0, 0, 1323, 1324, 4, 153, 14, 0, 1324, 1325, 3, 140, 62, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 24, 0, 1327, 323, 1, 0, 0, 0, 1328, 1329, 4, 154, 15, 0, 1329, 1330, 3, 174, 79, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 25, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 3, 184, 84, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 155, 31, 0, 1336, 327, 1, 0, 0, 0, 1337, 1338, 3, 180, 82, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 156, 32, 0, 1340, 329, 1, 0, 0, 0, 1341, 1342, 3, 66, 25, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 157, 11, 0, 1344, 331, 1, 0, 0, 0, 1345, 1346, 3, 68, 26, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 158, 11, 0, 1348, 333, 1, 0, 0, 0, 1349, 1350, 3, 70, 27, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 159, 11, 0, 1352, 335, 1, 0, 0, 0, 1353, 1354, 3, 72, 28, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 160, 16, 0, 1356, 1357, 6, 160, 12, 0, 1357, 337, 1, 0, 0, 0, 1358, 1359, 7, 1, 0, 0, 1359, 1360, 7, 9, 0, 0, 1360, 1361, 7, 15, 0, 0, 1361, 1362, 7, 7, 0, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 11, 0, 1366, 341, 1, 0, 0, 0, 1367, 1368, 3, 68, 26, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 163, 11, 0, 1370, 343, 1, 0, 0, 0, 1371, 1372, 3, 70, 27, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 164, 11, 0, 1374, 345, 1, 0, 0, 0, 1375, 1376, 3, 178, 81, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 165, 17, 0, 1378, 1379, 6, 165, 12, 0, 1379, 347, 1, 0, 0, 0, 1380, 1381, 3, 110, 47, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 166, 18, 0, 1383, 349, 1, 0, 0, 0, 1384, 1390, 3, 84, 34, 0, 1385, 1390, 3, 74, 29, 0, 1386, 1390, 3, 116, 50, 0, 1387, 1390, 3, 76, 30, 0, 1388, 1390, 3, 90, 37, 0, 1389, 1384, 1, 0, 0, 0, 1389, 1385, 1, 0, 0, 0, 1389, 1386, 1, 0, 0, 0, 1389, 1387, 1, 0, 0, 0, 1389, 1388, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1389, 1, 0, 0, 0, 1391, 1392, 1, 0, 0, 0, 1392, 351, 1, 0, 0, 0, 1393, 1394, 3, 66, 25, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 168, 11, 0, 1396, 353, 1, 0, 0, 0, 1397, 1398, 3, 68, 26, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 169, 11, 0, 1400, 355, 1, 0, 0, 0, 1401, 1402, 3, 70, 27, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 170, 11, 0, 1404, 357, 1, 0, 0, 0, 1405, 1406, 3, 72, 28, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 171, 16, 0, 1408, 1409, 6, 171, 12, 0, 1409, 359, 1, 0, 0, 0, 1410, 1411, 3, 110, 47, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 172, 18, 0, 1413, 361, 1, 0, 0, 0, 1414, 1415, 3, 112, 48, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 173, 19, 0, 1417, 363, 1, 0, 0, 0, 1418, 
1419, 3, 116, 50, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 174, 23, 0, 1421, 365, 1, 0, 0, 0, 1422, 1423, 3, 278, 131, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 175, 33, 0, 1425, 1426, 6, 175, 34, 0, 1426, 367, 1, 0, 0, 0, 1427, 1428, 3, 218, 101, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 176, 21, 0, 1430, 369, 1, 0, 0, 0, 1431, 1432, 3, 94, 39, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 177, 22, 0, 1434, 371, 1, 0, 0, 0, 1435, 1436, 3, 66, 25, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 178, 11, 0, 1438, 373, 1, 0, 0, 0, 1439, 1440, 3, 68, 26, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 179, 11, 0, 1442, 375, 1, 0, 0, 0, 1443, 1444, 3, 70, 27, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 180, 11, 0, 1446, 377, 1, 0, 0, 0, 1447, 1448, 3, 72, 28, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 181, 16, 0, 1450, 1451, 6, 181, 12, 0, 1451, 1452, 6, 181, 12, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 112, 48, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 19, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 116, 50, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 23, 0, 1460, 383, 1, 0, 0, 0, 1461, 1462, 3, 244, 114, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 184, 26, 0, 1464, 385, 1, 0, 0, 0, 1465, 1466, 3, 66, 25, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 185, 11, 0, 1468, 387, 1, 0, 0, 0, 1469, 1470, 3, 68, 26, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 186, 11, 0, 1472, 389, 1, 0, 0, 0, 1473, 1474, 3, 70, 27, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 187, 11, 0, 1476, 391, 1, 0, 0, 0, 1477, 1478, 3, 72, 28, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 188, 16, 0, 1480, 1481, 6, 188, 12, 0, 1481, 393, 1, 0, 0, 0, 1482, 1483, 3, 54, 19, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 189, 35, 0, 1485, 395, 1, 0, 0, 0, 1486, 1487, 3, 264, 124, 0, 1487, 1488, 1, 0, 0, 0, 1488, 1489, 6, 190, 36, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 278, 131, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 33, 0, 1493, 1494, 6, 191, 12, 0, 1494, 1495, 6, 191, 0, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 7, 20, 0, 0, 1497, 1498, 7, 2, 0, 0, 1498, 1499, 7, 1, 0, 0, 1499, 1500, 7, 9, 0, 0, 1500, 1501, 7, 17, 0, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 12, 0, 1503, 1504, 6, 192, 0, 0, 1504, 401, 1, 0, 0, 0, 1505, 1506, 3, 180, 82, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 193, 32, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 184, 84, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 31, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 66, 25, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 11, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 68, 26, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 11, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 70, 27, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 11, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 72, 28, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 16, 0, 1528, 1529, 6, 198, 12, 0, 1529, 413, 1, 0, 0, 0, 1530, 1531, 3, 218, 101, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 199, 21, 0, 1533, 1534, 6, 199, 12, 0, 1534, 1535, 6, 199, 37, 0, 1535, 415, 1, 0, 0, 0, 1536, 1537, 3, 94, 39, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 200, 22, 0, 1539, 1540, 6, 200, 12, 0, 1540, 1541, 6, 200, 37, 0, 1541, 417, 1, 0, 0, 0, 1542, 1543, 3, 66, 25, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 201, 11, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 68, 26, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 11, 0, 1549, 421, 1, 0, 0, 0, 1550, 1551, 3, 70, 27, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 203, 11, 0, 1553, 423, 1, 0, 0, 0, 1554, 1555, 3, 110, 47, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 204, 18, 0, 1557, 1558, 6, 204, 12, 0, 1558, 1559, 6, 204, 
9, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 112, 48, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 19, 0, 1563, 1564, 6, 205, 12, 0, 1564, 1565, 6, 205, 9, 0, 1565, 427, 1, 0, 0, 0, 1566, 1567, 3, 66, 25, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 206, 11, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 68, 26, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 11, 0, 1573, 431, 1, 0, 0, 0, 1574, 1575, 3, 70, 27, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 208, 11, 0, 1577, 433, 1, 0, 0, 0, 1578, 1579, 3, 184, 84, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 209, 12, 0, 1581, 1582, 6, 209, 0, 0, 1582, 1583, 6, 209, 31, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 180, 82, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 12, 0, 1587, 1588, 6, 210, 0, 0, 1588, 1589, 6, 210, 32, 0, 1589, 437, 1, 0, 0, 0, 1590, 1591, 3, 100, 42, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 211, 12, 0, 1593, 1594, 6, 211, 0, 0, 1594, 1595, 6, 211, 38, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 72, 28, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 16, 0, 1599, 1600, 6, 212, 12, 0, 1600, 441, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 655, 665, 669, 672, 681, 683, 694, 713, 718, 727, 734, 739, 741, 752, 760, 763, 765, 770, 775, 781, 788, 793, 799, 802, 810, 814, 942, 947, 954, 956, 972, 977, 982, 984, 990, 1067, 1072, 1121, 1125, 1130, 1135, 1140, 1142, 1146, 1148, 1235, 1239, 1244, 1389, 1391, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 7, 71, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 81, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index f10881fcf0692..f04582e820e28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -8,16 +8,14 @@ * 2.0. 
*/ +import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.LexerATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) public class EsqlBaseLexer extends LexerConfig { @@ -27,90 +25,96 @@ public class EsqlBaseLexer extends LexerConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, - INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, - CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, - LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, - RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, - PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, - SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, - LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, - LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, - METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, - CLOSING_METRICS_WS=119; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, 
NOT=49, NULL=50, NULLS=51, + OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, + LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, + NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, + UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, + EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, + METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, + FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, + PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, + RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, + ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, + ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, + MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, + SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110, + SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, + LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, + LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, + JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, + METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, + CLOSING_METRICS_WS=128; public static final int - EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, - LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; + EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", - "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" + "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "JOIN_MODE", "METRICS_MODE", + "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { - "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", - "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - 
"PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", - "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", - "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", - "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", - "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", - "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", - "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", - "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", - "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", - "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", - "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", - "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", + "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", + "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", + "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", 
"GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", + "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", + "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", + "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", + "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", + "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", + "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", + "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN", + "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", + "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", + "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } @@ -118,45 +122,49 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - "'like'", "'('", "'not'", "'null'", 
"'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", + "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, null, null, null, null, null, null, "'as'", null, null, null, + "'on'", "'with'", null, null, null, null, null, null, null, null, null, + null, "'info'", null, null, null, null, null, null, null, null, null, + null, null, null, null, "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", 
"DEV_JOIN_LOOKUP", + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -228,23 +236,31 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 73: - return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex); - case 106: + case 19: + return DEV_JOIN_sempred((RuleContext)_localctx, predIndex); + case 20: + return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex); + case 21: + return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex); + case 22: + return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); + case 23: + return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); + case 110: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 107: + case 111: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 118: + case 122: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 119: + case 123: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 142: + case 146: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 143: + case 147: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 149: + case 153: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 150: + case 154: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -270,1034 +286,1142 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 3: return 
this.isDevVersion(); } return true; } - private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_FULL_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 4: return this.isDevVersion(); } return true; } - private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_LEFT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 5: return this.isDevVersion(); } return true; } - private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 6: return this.isDevVersion(); } return true; } - private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 7: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 8: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 9: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 10: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 11: return this.isDevVersion(); } return true; } + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 12: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 13: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 14: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 15: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000w\u05cc\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ - "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ - "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ - "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ - "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ - 
"\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ - "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ - "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ - "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ - "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ - "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ - "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ - "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ - "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ - "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ - "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ - "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ - "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ - "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ - "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ - "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ - "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ - "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ - "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ - "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ - "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ - "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ - "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ - "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ - "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ - "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ - "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ - "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ - "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ - "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ - "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ - "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ - "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+ - "\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002"+ - "\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002"+ - "\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002"+ - "\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002"+ - "\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002"+ - "\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002"+ - "\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0002"+ - "\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007\u00b0\u0002"+ - "\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007\u00b3\u0002"+ - "\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6\u0002"+ - "\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9\u0002"+ - "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ - "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ - 
"\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0002"+ - "\u00c6\u0007\u00c6\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0004\u0000\u0080\u0641\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007"+ + "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+ + "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+ + "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+ + "T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007"+ + "Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007"+ + "^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007"+ + "c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007"+ + "h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007"+ + "m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007"+ + "r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007"+ + "w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007"+ + "|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007"+ + "\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007"+ + "\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007"+ + "\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007"+ + "\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007"+ + "\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007"+ + "\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007"+ + "\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007"+ + 
"\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007"+ + "\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007"+ + "\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007"+ + "\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007"+ + "\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007"+ + "\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007"+ + "\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007"+ + "\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007"+ + "\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007"+ + "\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007"+ + "\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007"+ + "\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007"+ + "\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007"+ + "\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007"+ + "\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007"+ + "\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007"+ + "\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7\u0002\u00c8\u0007"+ + "\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca\u0002\u00cb\u0007"+ + "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ + "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ + "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ + "\u00d4\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - 
"\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013\u0244\b\u0013\u000b\u0013"+ - "\f\u0013\u0245\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u024e\b\u0014\n\u0014\f\u0014\u0251\t\u0014\u0001"+ - "\u0014\u0003\u0014\u0254\b\u0014\u0001\u0014\u0003\u0014\u0257\b\u0014"+ - "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0005\u0015\u0260\b\u0015\n\u0015\f\u0015\u0263\t\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004"+ - "\u0016\u026b\b\u0016\u000b\u0016\f\u0016\u026c\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0003\u001d"+ - "\u0282\b\u001d\u0001\u001d\u0004\u001d\u0285\b\u001d\u000b\u001d\f\u001d"+ - "\u0286\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ - " \u0003 \u0290\b \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u0297"+ - "\b\"\u0001#\u0001#\u0001#\u0005#\u029c\b#\n#\f#\u029f\t#\u0001#\u0001"+ - "#\u0001#\u0001#\u0001#\u0001#\u0005#\u02a7\b#\n#\f#\u02aa\t#\u0001#\u0001"+ - "#\u0001#\u0001#\u0001#\u0003#\u02b1\b#\u0001#\u0003#\u02b4\b#\u0003#\u02b6"+ - "\b#\u0001$\u0004$\u02b9\b$\u000b$\f$\u02ba\u0001%\u0004%\u02be\b%\u000b"+ - "%\f%\u02bf\u0001%\u0001%\u0005%\u02c4\b%\n%\f%\u02c7\t%\u0001%\u0001%"+ - "\u0004%\u02cb\b%\u000b%\f%\u02cc\u0001%\u0004%\u02d0\b%\u000b%\f%\u02d1"+ - "\u0001%\u0001%\u0005%\u02d6\b%\n%\f%\u02d9\t%\u0003%\u02db\b%\u0001%\u0001"+ - "%\u0001%\u0001%\u0004%\u02e1\b%\u000b%\f%\u02e2\u0001%\u0001%\u0003%\u02e7"+ - "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001"+ - "(\u0001(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ - "0\u00010\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001"+ - "@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ - "I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ - "K\u0003K\u036a\bK\u0001K\u0005K\u036d\bK\nK\fK\u0370\tK\u0001K\u0001K"+ - 
"\u0004K\u0374\bK\u000bK\fK\u0375\u0003K\u0378\bK\u0001L\u0001L\u0001L"+ - "\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005"+ - "N\u0386\bN\nN\fN\u0389\tN\u0001N\u0001N\u0003N\u038d\bN\u0001N\u0004N"+ - "\u0390\bN\u000bN\fN\u0391\u0003N\u0394\bN\u0001O\u0001O\u0004O\u0398\b"+ - "O\u000bO\fO\u0399\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001"+ - "Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ - "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001U\u0001V\u0001"+ - "V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ - "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ - "]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0003`\u03e7\b`\u0001"+ - "a\u0004a\u03ea\ba\u000ba\fa\u03eb\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001"+ - "h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001"+ - "j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001"+ - "l\u0001l\u0003l\u041d\bl\u0001m\u0001m\u0003m\u0421\bm\u0001m\u0005m\u0424"+ - "\bm\nm\fm\u0427\tm\u0001m\u0001m\u0003m\u042b\bm\u0001m\u0004m\u042e\b"+ - "m\u000bm\fm\u042f\u0003m\u0432\bm\u0001n\u0001n\u0004n\u0436\bn\u000b"+ - "n\fn\u0437\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001"+ - "q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001s\u0001"+ - "s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001"+ - "u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001"+ - "}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ - "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0001\u0081\u0001\u0082\u0004\u0082\u048d\b\u0082\u000b\u0082\f"+ - "\u0082\u048e\u0001\u0082\u0001\u0082\u0003\u0082\u0493\b\u0082\u0001\u0082"+ - "\u0004\u0082\u0496\b\u0082\u000b\u0082\f\u0082\u0497\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ - "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ - "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ - 
"\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0004\u00a3\u0529\b\u00a3\u000b"+ - "\u00a3\f\u00a3\u052a\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ + "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ + "\u0018\u028e\b\u0018\u000b\u0018\f\u0018\u028f\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0298\b\u0019"+ + "\n\u0019\f\u0019\u029b\t\u0019\u0001\u0019\u0003\u0019\u029e\b\u0019\u0001"+ + "\u0019\u0003\u0019\u02a1\b\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02aa\b\u001a\n"+ + "\u001a\f\u001a\u02ad\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b5\b\u001b\u000b\u001b\f"+ + "\u001b\u02b6\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02ca\b!\u0001"+ + "!\u0004!\u02cd\b!\u000b!\f!\u02ce\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "$\u0001$\u0001$\u0003$\u02d8\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ + "&\u02df\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02e4\b\'\n\'\f\'\u02e7\t\'"+ + "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u02ef\b\'\n\'"+ + 
"\f\'\u02f2\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u02f9\b"+ + "\'\u0001\'\u0003\'\u02fc\b\'\u0003\'\u02fe\b\'\u0001(\u0004(\u0301\b("+ + "\u000b(\f(\u0302\u0001)\u0004)\u0306\b)\u000b)\f)\u0307\u0001)\u0001)"+ + "\u0005)\u030c\b)\n)\f)\u030f\t)\u0001)\u0001)\u0004)\u0313\b)\u000b)\f"+ + ")\u0314\u0001)\u0004)\u0318\b)\u000b)\f)\u0319\u0001)\u0001)\u0005)\u031e"+ + "\b)\n)\f)\u0321\t)\u0003)\u0323\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ + "\u0329\b)\u000b)\f)\u032a\u0001)\u0001)\u0003)\u032f\b)\u0001*\u0001*"+ + "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0003O\u03af\bO\u0001O\u0005O\u03b2\bO\n"+ + "O\fO\u03b5\tO\u0001O\u0001O\u0004O\u03b9\bO\u000bO\fO\u03ba\u0003O\u03bd"+ + "\bO\u0001P\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0005R\u03cb\bR\nR\fR\u03ce\tR\u0001R\u0001R\u0003R\u03d2"+ + "\bR\u0001R\u0004R\u03d5\bR\u000bR\fR\u03d6\u0003R\u03d9\bR\u0001S\u0001"+ + "S\u0004S\u03dd\bS\u000bS\fS\u03de\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0003"+ + "d\u042c\bd\u0001e\u0004e\u042f\be\u000be\fe\u0430\u0001f\u0001f\u0001"+ + "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ + "k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0003p\u0462\bp\u0001q\u0001q\u0003q\u0466"+ + "\bq\u0001q\u0005q\u0469\bq\nq\fq\u046c\tq\u0001q\u0001q\u0003q\u0470\b"+ + "q\u0001q\u0004q\u0473\bq\u000bq\fq\u0474\u0003q\u0477\bq\u0001r\u0001"+ + "r\u0004r\u047b\br\u000br\fr\u047c\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ + "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + 
"\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0004"+ + "\u0086\u04d2\b\u0086\u000b\u0086\f\u0086\u04d3\u0001\u0086\u0001\u0086"+ + "\u0003\u0086\u04d8\b\u0086\u0001\u0086\u0004\u0086\u04db\b\u0086\u000b"+ + "\u0086\f\u0086\u04dc\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ + "\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001"+ + "\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001"+ - "\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ - 
"\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ - "\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001"+ - "\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0002"+ - "\u0261\u02a8\u0000\u00c7\u000f\u0001\u0011\u0002\u0013\u0003\u0015\u0004"+ - "\u0017\u0005\u0019\u0006\u001b\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r"+ - ")\u000e+\u000f-\u0010/\u00111\u00123\u00135\u00147\u00159\u0016;\u0017"+ - "=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000"+ - "Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001fa c!e\"g#i$k%"+ - "m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d"+ - "6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1"+ - "\u0000\u00a3\u0000\u00a5@\u00a7A\u00a9B\u00abC\u00ad\u0000\u00afD\u00b1"+ - "E\u00b3F\u00b5G\u00b7\u0000\u00b9\u0000\u00bbH\u00bdI\u00bfJ\u00c1\u0000"+ - "\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cdK\u00cf"+ - "\u0000\u00d1L\u00d3\u0000\u00d5\u0000\u00d7M\u00d9N\u00dbO\u00dd\u0000"+ - "\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000\u00e7\u0000\u00e9\u0000"+ - "\u00ebP\u00edQ\u00efR\u00f1S\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9"+ - "\u0000\u00fb\u0000\u00fd\u0000\u00ffT\u0101\u0000\u0103U\u0105V\u0107"+ - "W\u0109\u0000\u010b\u0000\u010dX\u010fY\u0111\u0000\u0113Z\u0115\u0000"+ - "\u0117[\u0119\\\u011b]\u011d\u0000\u011f\u0000\u0121\u0000\u0123\u0000"+ - "\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d\u0000\u012f^\u0131"+ - "_\u0133`\u0135\u0000\u0137\u0000\u0139\u0000\u013b\u0000\u013d\u0000\u013f"+ - "\u0000\u0141a\u0143b\u0145c\u0147\u0000\u0149d\u014be\u014df\u014fg\u0151"+ - "\u0000\u0153\u0000\u0155h\u0157i\u0159j\u015bk\u015d\u0000\u015f\u0000"+ - "\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169\u0000\u016bl\u016d"+ - "m\u016fn\u0171\u0000\u0173\u0000\u0175\u0000\u0177\u0000\u0179o\u017b"+ - "p\u017dq\u017f\u0000\u0181\u0000\u0183\u0000\u0185r\u0187s\u0189t\u018b"+ - "\u0000\u018d\u0000\u018fu\u0191v\u0193w\u0195\u0000\u0197\u0000\u0199"+ - "\u0000\u019b\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ - "\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SS"+ - "ss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ - "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ - "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ - "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ - "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ - "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ - "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ - "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05e8\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - 
"\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ - "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ - "\u0000\u0000=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001"+ - "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ - "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ - "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ - "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ - "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ - "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ - "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ - "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ - "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ - "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ - "\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ - "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ - "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ - "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ - "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ - "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ - "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ - "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ - "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ - "\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001"+ - "\u00ab\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ - "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001"+ - "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ - "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002"+ - "\u00bd\u0001\u0000\u0000\u0000\u0002\u00bf\u0001\u0000\u0000\u0000\u0003"+ - "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ - "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ - "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003"+ - "\u00cd\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ - "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ - "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003"+ - "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ - "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ - "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e5\u0001\u0000\u0000\u0000\u0004"+ - "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004"+ - 
"\u00ef\u0001\u0000\u0000\u0000\u0004\u00f1\u0001\u0000\u0000\u0000\u0005"+ - "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ - "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ - "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ - "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ - "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0005"+ - "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ - "\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006"+ - "\u010f\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ - "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ - "\u0119\u0001\u0000\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0007"+ - "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ - "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ - "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ - "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ - "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007"+ - "\u0131\u0001\u0000\u0000\u0000\u0007\u0133\u0001\u0000\u0000\u0000\b\u0135"+ - "\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001"+ - "\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000"+ - "\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000"+ - "\u0000\b\u0143\u0001\u0000\u0000\u0000\b\u0145\u0001\u0000\u0000\u0000"+ - "\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b"+ - "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\t\u014f\u0001"+ - "\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000"+ - "\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000"+ - "\u0000\n\u0159\u0001\u0000\u0000\u0000\n\u015b\u0001\u0000\u0000\u0000"+ - "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ - "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ - "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ - "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ - "\u000b\u016d\u0001\u0000\u0000\u0000\u000b\u016f\u0001\u0000\u0000\u0000"+ - "\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ - "\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001"+ - "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\f\u017d\u0001\u0000"+ - "\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000"+ - "\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000"+ - "\r\u0187\u0001\u0000\u0000\u0000\r\u0189\u0001\u0000\u0000\u0000\u000e"+ - "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ - "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ - "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ - "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000e"+ - "\u019b\u0001\u0000\u0000\u0000\u000f\u019d\u0001\u0000\u0000\u0000\u0011"+ - "\u01a7\u0001\u0000\u0000\u0000\u0013\u01ae\u0001\u0000\u0000\u0000\u0015"+ - "\u01b7\u0001\u0000\u0000\u0000\u0017\u01be\u0001\u0000\u0000\u0000\u0019"+ - "\u01c8\u0001\u0000\u0000\u0000\u001b\u01cf\u0001\u0000\u0000\u0000\u001d"+ - 
"\u01d6\u0001\u0000\u0000\u0000\u001f\u01dd\u0001\u0000\u0000\u0000!\u01e5"+ - "\u0001\u0000\u0000\u0000#\u01f1\u0001\u0000\u0000\u0000%\u01fa\u0001\u0000"+ - "\u0000\u0000\'\u0200\u0001\u0000\u0000\u0000)\u0207\u0001\u0000\u0000"+ - "\u0000+\u020e\u0001\u0000\u0000\u0000-\u0216\u0001\u0000\u0000\u0000/"+ - "\u021e\u0001\u0000\u0000\u00001\u022d\u0001\u0000\u0000\u00003\u0237\u0001"+ - "\u0000\u0000\u00005\u0243\u0001\u0000\u0000\u00007\u0249\u0001\u0000\u0000"+ - "\u00009\u025a\u0001\u0000\u0000\u0000;\u026a\u0001\u0000\u0000\u0000="+ - "\u0270\u0001\u0000\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0276\u0001"+ - "\u0000\u0000\u0000C\u0278\u0001\u0000\u0000\u0000E\u027a\u0001\u0000\u0000"+ - "\u0000G\u027d\u0001\u0000\u0000\u0000I\u027f\u0001\u0000\u0000\u0000K"+ - "\u0288\u0001\u0000\u0000\u0000M\u028a\u0001\u0000\u0000\u0000O\u028f\u0001"+ - "\u0000\u0000\u0000Q\u0291\u0001\u0000\u0000\u0000S\u0296\u0001\u0000\u0000"+ - "\u0000U\u02b5\u0001\u0000\u0000\u0000W\u02b8\u0001\u0000\u0000\u0000Y"+ - "\u02e6\u0001\u0000\u0000\u0000[\u02e8\u0001\u0000\u0000\u0000]\u02eb\u0001"+ - "\u0000\u0000\u0000_\u02ef\u0001\u0000\u0000\u0000a\u02f3\u0001\u0000\u0000"+ - "\u0000c\u02f5\u0001\u0000\u0000\u0000e\u02f8\u0001\u0000\u0000\u0000g"+ - "\u02fa\u0001\u0000\u0000\u0000i\u02ff\u0001\u0000\u0000\u0000k\u0301\u0001"+ - "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030d\u0001\u0000\u0000"+ - "\u0000q\u0310\u0001\u0000\u0000\u0000s\u0313\u0001\u0000\u0000\u0000u"+ - "\u0318\u0001\u0000\u0000\u0000w\u031d\u0001\u0000\u0000\u0000y\u031f\u0001"+ - "\u0000\u0000\u0000{\u0323\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ - "\u0000\u007f\u032e\u0001\u0000\u0000\u0000\u0081\u0331\u0001\u0000\u0000"+ - "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0339\u0001\u0000\u0000"+ - "\u0000\u0087\u033b\u0001\u0000\u0000\u0000\u0089\u0340\u0001\u0000\u0000"+ - "\u0000\u008b\u0343\u0001\u0000\u0000\u0000\u008d\u0346\u0001\u0000\u0000"+ - "\u0000\u008f\u0349\u0001\u0000\u0000\u0000\u0091\u034b\u0001\u0000\u0000"+ - "\u0000\u0093\u034e\u0001\u0000\u0000\u0000\u0095\u0350\u0001\u0000\u0000"+ - "\u0000\u0097\u0353\u0001\u0000\u0000\u0000\u0099\u0355\u0001\u0000\u0000"+ - "\u0000\u009b\u0357\u0001\u0000\u0000\u0000\u009d\u0359\u0001\u0000\u0000"+ - "\u0000\u009f\u035b\u0001\u0000\u0000\u0000\u00a1\u035d\u0001\u0000\u0000"+ - "\u0000\u00a3\u0362\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000"+ - "\u0000\u00a7\u0379\u0001\u0000\u0000\u0000\u00a9\u037e\u0001\u0000\u0000"+ - "\u0000\u00ab\u0393\u0001\u0000\u0000\u0000\u00ad\u0395\u0001\u0000\u0000"+ - "\u0000\u00af\u039d\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ - "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ - "\u0000\u00b7\u03ab\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ - "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ - "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ - "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ - "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ - "\u0000\u00cb\u03d6\u0001\u0000\u0000\u0000\u00cd\u03da\u0001\u0000\u0000"+ - "\u0000\u00cf\u03e6\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ - "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ - "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ - "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ - 
"\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ - "\u0000\u00e3\u040e\u0001\u0000\u0000\u0000\u00e5\u0413\u0001\u0000\u0000"+ - "\u0000\u00e7\u041c\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000"+ - "\u0000\u00eb\u0435\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000"+ - "\u0000\u00ef\u043d\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000"+ - "\u0000\u00f3\u0445\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000"+ - "\u0000\u00f7\u044e\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000"+ - "\u0000\u00fb\u0456\u0001\u0000\u0000\u0000\u00fd\u045b\u0001\u0000\u0000"+ - "\u0000\u00ff\u0460\u0001\u0000\u0000\u0000\u0101\u0463\u0001\u0000\u0000"+ - "\u0000\u0103\u0467\u0001\u0000\u0000\u0000\u0105\u046b\u0001\u0000\u0000"+ - "\u0000\u0107\u046f\u0001\u0000\u0000\u0000\u0109\u0473\u0001\u0000\u0000"+ - "\u0000\u010b\u0478\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ - "\u0000\u010f\u0482\u0001\u0000\u0000\u0000\u0111\u0489\u0001\u0000\u0000"+ - "\u0000\u0113\u0492\u0001\u0000\u0000\u0000\u0115\u0499\u0001\u0000\u0000"+ - "\u0000\u0117\u049d\u0001\u0000\u0000\u0000\u0119\u04a1\u0001\u0000\u0000"+ - "\u0000\u011b\u04a5\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000"+ - "\u0000\u011f\u04af\u0001\u0000\u0000\u0000\u0121\u04b3\u0001\u0000\u0000"+ - "\u0000\u0123\u04b7\u0001\u0000\u0000\u0000\u0125\u04bb\u0001\u0000\u0000"+ - "\u0000\u0127\u04bf\u0001\u0000\u0000\u0000\u0129\u04c3\u0001\u0000\u0000"+ - "\u0000\u012b\u04c7\u0001\u0000\u0000\u0000\u012d\u04cc\u0001\u0000\u0000"+ - "\u0000\u012f\u04d1\u0001\u0000\u0000\u0000\u0131\u04d5\u0001\u0000\u0000"+ - "\u0000\u0133\u04d9\u0001\u0000\u0000\u0000\u0135\u04dd\u0001\u0000\u0000"+ - "\u0000\u0137\u04e2\u0001\u0000\u0000\u0000\u0139\u04e6\u0001\u0000\u0000"+ - "\u0000\u013b\u04eb\u0001\u0000\u0000\u0000\u013d\u04f0\u0001\u0000\u0000"+ - "\u0000\u013f\u04f4\u0001\u0000\u0000\u0000\u0141\u04f8\u0001\u0000\u0000"+ - "\u0000\u0143\u04fc\u0001\u0000\u0000\u0000\u0145\u0500\u0001\u0000\u0000"+ - "\u0000\u0147\u0504\u0001\u0000\u0000\u0000\u0149\u0509\u0001\u0000\u0000"+ - "\u0000\u014b\u050e\u0001\u0000\u0000\u0000\u014d\u0512\u0001\u0000\u0000"+ - "\u0000\u014f\u0516\u0001\u0000\u0000\u0000\u0151\u051a\u0001\u0000\u0000"+ - "\u0000\u0153\u051f\u0001\u0000\u0000\u0000\u0155\u0528\u0001\u0000\u0000"+ - "\u0000\u0157\u052c\u0001\u0000\u0000\u0000\u0159\u0530\u0001\u0000\u0000"+ - "\u0000\u015b\u0534\u0001\u0000\u0000\u0000\u015d\u0538\u0001\u0000\u0000"+ - "\u0000\u015f\u053d\u0001\u0000\u0000\u0000\u0161\u0541\u0001\u0000\u0000"+ - "\u0000\u0163\u0545\u0001\u0000\u0000\u0000\u0165\u0549\u0001\u0000\u0000"+ - "\u0000\u0167\u054e\u0001\u0000\u0000\u0000\u0169\u0552\u0001\u0000\u0000"+ - "\u0000\u016b\u0556\u0001\u0000\u0000\u0000\u016d\u055a\u0001\u0000\u0000"+ - "\u0000\u016f\u055e\u0001\u0000\u0000\u0000\u0171\u0562\u0001\u0000\u0000"+ - "\u0000\u0173\u0568\u0001\u0000\u0000\u0000\u0175\u056c\u0001\u0000\u0000"+ - "\u0000\u0177\u0570\u0001\u0000\u0000\u0000\u0179\u0574\u0001\u0000\u0000"+ - "\u0000\u017b\u0578\u0001\u0000\u0000\u0000\u017d\u057c\u0001\u0000\u0000"+ - "\u0000\u017f\u0580\u0001\u0000\u0000\u0000\u0181\u0585\u0001\u0000\u0000"+ - "\u0000\u0183\u058b\u0001\u0000\u0000\u0000\u0185\u0591\u0001\u0000\u0000"+ - "\u0000\u0187\u0595\u0001\u0000\u0000\u0000\u0189\u0599\u0001\u0000\u0000"+ - "\u0000\u018b\u059d\u0001\u0000\u0000\u0000\u018d\u05a3\u0001\u0000\u0000"+ - "\u0000\u018f\u05a9\u0001\u0000\u0000\u0000\u0191\u05ad\u0001\u0000\u0000"+ - 
"\u0000\u0193\u05b1\u0001\u0000\u0000\u0000\u0195\u05b5\u0001\u0000\u0000"+ - "\u0000\u0197\u05bb\u0001\u0000\u0000\u0000\u0199\u05c1\u0001\u0000\u0000"+ - "\u0000\u019b\u05c7\u0001\u0000\u0000\u0000\u019d\u019e\u0007\u0000\u0000"+ - "\u0000\u019e\u019f\u0007\u0001\u0000\u0000\u019f\u01a0\u0007\u0002\u0000"+ - "\u0000\u01a0\u01a1\u0007\u0002\u0000\u0000\u01a1\u01a2\u0007\u0003\u0000"+ - "\u0000\u01a2\u01a3\u0007\u0004\u0000\u0000\u01a3\u01a4\u0007\u0005\u0000"+ - "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6\u0006\u0000\u0000"+ - "\u0000\u01a6\u0010\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0000\u0000"+ - "\u0000\u01a8\u01a9\u0007\u0006\u0000\u0000\u01a9\u01aa\u0007\u0007\u0000"+ - "\u0000\u01aa\u01ab\u0007\b\u0000\u0000\u01ab\u01ac\u0001\u0000\u0000\u0000"+ - "\u01ac\u01ad\u0006\u0001\u0001\u0000\u01ad\u0012\u0001\u0000\u0000\u0000"+ - "\u01ae\u01af\u0007\u0003\u0000\u0000\u01af\u01b0\u0007\t\u0000\u0000\u01b0"+ - "\u01b1\u0007\u0006\u0000\u0000\u01b1\u01b2\u0007\u0001\u0000\u0000\u01b2"+ - "\u01b3\u0007\u0004\u0000\u0000\u01b3\u01b4\u0007\n\u0000\u0000\u01b4\u01b5"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0006\u0002\u0002\u0000\u01b6\u0014"+ - "\u0001\u0000\u0000\u0000\u01b7\u01b8\u0007\u0003\u0000\u0000\u01b8\u01b9"+ - "\u0007\u000b\u0000\u0000\u01b9\u01ba\u0007\f\u0000\u0000\u01ba\u01bb\u0007"+ - "\r\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\u0003"+ - "\u0000\u0000\u01bd\u0016\u0001\u0000\u0000\u0000\u01be\u01bf\u0007\u0003"+ - "\u0000\u0000\u01bf\u01c0\u0007\u000e\u0000\u0000\u01c0\u01c1\u0007\b\u0000"+ - "\u0000\u01c1\u01c2\u0007\r\u0000\u0000\u01c2\u01c3\u0007\f\u0000\u0000"+ - "\u01c3\u01c4\u0007\u0001\u0000\u0000\u01c4\u01c5\u0007\t\u0000\u0000\u01c5"+ - "\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u0004\u0003\u0000\u01c7"+ - "\u0018\u0001\u0000\u0000\u0000\u01c8\u01c9\u0007\u000f\u0000\u0000\u01c9"+ - "\u01ca\u0007\u0006\u0000\u0000\u01ca\u01cb\u0007\u0007\u0000\u0000\u01cb"+ - "\u01cc\u0007\u0010\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0006\u0005\u0004\u0000\u01ce\u001a\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0007\u0011\u0000\u0000\u01d0\u01d1\u0007\u0006\u0000\u0000\u01d1"+ - "\u01d2\u0007\u0007\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0006\u0000\u0000\u01d5"+ - "\u001c\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0012\u0000\u0000\u01d7"+ - "\u01d8\u0007\u0003\u0000\u0000\u01d8\u01d9\u0007\u0003\u0000\u0000\u01d9"+ - "\u01da\u0007\b\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0006\u0007\u0001\u0000\u01dc\u001e\u0001\u0000\u0000\u0000\u01dd\u01de"+ - "\u0007\r\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ - "\u0010\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2\u0007"+ - "\u0005\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ - "\b\u0000\u0000\u01e4 \u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0010"+ - "\u0000\u0000\u01e6\u01e7\u0007\u000b\u0000\u0000\u01e7\u01e8\u0005_\u0000"+ - "\u0000\u01e8\u01e9\u0007\u0003\u0000\u0000\u01e9\u01ea\u0007\u000e\u0000"+ - "\u0000\u01ea\u01eb\u0007\b\u0000\u0000\u01eb\u01ec\u0007\f\u0000\u0000"+ - "\u01ec\u01ed\u0007\t\u0000\u0000\u01ed\u01ee\u0007\u0000\u0000\u0000\u01ee"+ - "\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\t\u0005\u0000\u01f0\""+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0007\u0006\u0000\u0000\u01f2\u01f3"+ - "\u0007\u0003\u0000\u0000\u01f3\u01f4\u0007\t\u0000\u0000\u01f4\u01f5\u0007"+ - 
"\f\u0000\u0000\u01f5\u01f6\u0007\u0010\u0000\u0000\u01f6\u01f7\u0007\u0003"+ - "\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\n\u0006"+ - "\u0000\u01f9$\u0001\u0000\u0000\u0000\u01fa\u01fb\u0007\u0006\u0000\u0000"+ - "\u01fb\u01fc\u0007\u0007\u0000\u0000\u01fc\u01fd\u0007\u0013\u0000\u0000"+ - "\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u01ff\u0006\u000b\u0000\u0000"+ - "\u01ff&\u0001\u0000\u0000\u0000\u0200\u0201\u0007\u0002\u0000\u0000\u0201"+ - "\u0202\u0007\n\u0000\u0000\u0202\u0203\u0007\u0007\u0000\u0000\u0203\u0204"+ - "\u0007\u0013\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ - "\u0006\f\u0007\u0000\u0206(\u0001\u0000\u0000\u0000\u0207\u0208\u0007"+ - "\u0002\u0000\u0000\u0208\u0209\u0007\u0007\u0000\u0000\u0209\u020a\u0007"+ - "\u0006\u0000\u0000\u020a\u020b\u0007\u0005\u0000\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\r\u0000\u0000\u020d*\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0007\u0005"+ - "\u0000\u0000\u0210\u0211\u0007\f\u0000\u0000\u0211\u0212\u0007\u0005\u0000"+ - "\u0000\u0212\u0213\u0007\u0002\u0000\u0000\u0213\u0214\u0001\u0000\u0000"+ - "\u0000\u0214\u0215\u0006\u000e\u0000\u0000\u0215,\u0001\u0000\u0000\u0000"+ - "\u0216\u0217\u0007\u0013\u0000\u0000\u0217\u0218\u0007\n\u0000\u0000\u0218"+ - "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0007\u0006\u0000\u0000\u021a"+ - "\u021b\u0007\u0003\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u0006\u000f\u0000\u0000\u021d.\u0001\u0000\u0000\u0000\u021e\u021f"+ - "\u0004\u0010\u0000\u0000\u021f\u0220\u0007\u0001\u0000\u0000\u0220\u0221"+ - "\u0007\t\u0000\u0000\u0221\u0222\u0007\r\u0000\u0000\u0222\u0223\u0007"+ - "\u0001\u0000\u0000\u0223\u0224\u0007\t\u0000\u0000\u0224\u0225\u0007\u0003"+ - "\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007\u0005"+ - "\u0000\u0000\u0227\u0228\u0007\f\u0000\u0000\u0228\u0229\u0007\u0005\u0000"+ - "\u0000\u0229\u022a\u0007\u0002\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ - "\u0000\u022b\u022c\u0006\u0010\u0000\u0000\u022c0\u0001\u0000\u0000\u0000"+ - "\u022d\u022e\u0004\u0011\u0001\u0000\u022e\u022f\u0007\r\u0000\u0000\u022f"+ - "\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0007\u0000\u0000\u0231"+ - "\u0232\u0007\u0012\u0000\u0000\u0232\u0233\u0007\u0014\u0000\u0000\u0233"+ - "\u0234\u0007\b\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236"+ - "\u0006\u0011\b\u0000\u02362\u0001\u0000\u0000\u0000\u0237\u0238\u0004"+ - "\u0012\u0002\u0000\u0238\u0239\u0007\u0010\u0000\u0000\u0239\u023a\u0007"+ - "\u0003\u0000\u0000\u023a\u023b\u0007\u0005\u0000\u0000\u023b\u023c\u0007"+ - "\u0006\u0000\u0000\u023c\u023d\u0007\u0001\u0000\u0000\u023d\u023e\u0007"+ - "\u0004\u0000\u0000\u023e\u023f\u0007\u0002\u0000\u0000\u023f\u0240\u0001"+ - "\u0000\u0000\u0000\u0240\u0241\u0006\u0012\t\u0000\u02414\u0001\u0000"+ - "\u0000\u0000\u0242\u0244\b\u0015\u0000\u0000\u0243\u0242\u0001\u0000\u0000"+ - "\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000"+ - "\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0006\u0013\u0000\u0000\u02486\u0001\u0000\u0000\u0000"+ - "\u0249\u024a\u0005/\u0000\u0000\u024a\u024b\u0005/\u0000\u0000\u024b\u024f"+ - "\u0001\u0000\u0000\u0000\u024c\u024e\b\u0016\u0000\u0000\u024d\u024c\u0001"+ - "\u0000\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001"+ - "\u0000\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0253\u0001"+ - 
"\u0000\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0254\u0005"+ - "\r\u0000\u0000\u0253\u0252\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0257\u0005\n\u0000"+ - "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000"+ - "\u0000\u0257\u0258\u0001\u0000\u0000\u0000\u0258\u0259\u0006\u0014\n\u0000"+ - "\u02598\u0001\u0000\u0000\u0000\u025a\u025b\u0005/\u0000\u0000\u025b\u025c"+ - "\u0005*\u0000\u0000\u025c\u0261\u0001\u0000\u0000\u0000\u025d\u0260\u0003"+ - "9\u0015\u0000\u025e\u0260\t\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ - "\u0000\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000"+ - "\u0000\u0000\u0261\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ - "\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0005*\u0000\u0000\u0265\u0266\u0005/\u0000\u0000"+ - "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0015\n\u0000\u0268"+ - ":\u0001\u0000\u0000\u0000\u0269\u026b\u0007\u0017\u0000\u0000\u026a\u0269"+ - "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e"+ - "\u0001\u0000\u0000\u0000\u026e\u026f\u0006\u0016\n\u0000\u026f<\u0001"+ - "\u0000\u0000\u0000\u0270\u0271\u0005:\u0000\u0000\u0271>\u0001\u0000\u0000"+ - "\u0000\u0272\u0273\u0005|\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ - "\u0274\u0275\u0006\u0018\u000b\u0000\u0275@\u0001\u0000\u0000\u0000\u0276"+ - "\u0277\u0007\u0018\u0000\u0000\u0277B\u0001\u0000\u0000\u0000\u0278\u0279"+ - "\u0007\u0019\u0000\u0000\u0279D\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ - "\\\u0000\u0000\u027b\u027c\u0007\u001a\u0000\u0000\u027cF\u0001\u0000"+ - "\u0000\u0000\u027d\u027e\b\u001b\u0000\u0000\u027eH\u0001\u0000\u0000"+ - "\u0000\u027f\u0281\u0007\u0003\u0000\u0000\u0280\u0282\u0007\u001c\u0000"+ - "\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000\u0000"+ - "\u0000\u0282\u0284\u0001\u0000\u0000\u0000\u0283\u0285\u0003A\u0019\u0000"+ - "\u0284\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000"+ - "\u0286\u0284\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000\u0000\u0000"+ - "\u0287J\u0001\u0000\u0000\u0000\u0288\u0289\u0005@\u0000\u0000\u0289L"+ - "\u0001\u0000\u0000\u0000\u028a\u028b\u0005`\u0000\u0000\u028bN\u0001\u0000"+ - "\u0000\u0000\u028c\u0290\b\u001d\u0000\u0000\u028d\u028e\u0005`\u0000"+ - "\u0000\u028e\u0290\u0005`\u0000\u0000\u028f\u028c\u0001\u0000\u0000\u0000"+ - "\u028f\u028d\u0001\u0000\u0000\u0000\u0290P\u0001\u0000\u0000\u0000\u0291"+ - "\u0292\u0005_\u0000\u0000\u0292R\u0001\u0000\u0000\u0000\u0293\u0297\u0003"+ - "C\u001a\u0000\u0294\u0297\u0003A\u0019\u0000\u0295\u0297\u0003Q!\u0000"+ - "\u0296\u0293\u0001\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000"+ - "\u0296\u0295\u0001\u0000\u0000\u0000\u0297T\u0001\u0000\u0000\u0000\u0298"+ - "\u029d\u0005\"\u0000\u0000\u0299\u029c\u0003E\u001b\u0000\u029a\u029c"+ - "\u0003G\u001c\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b\u029a\u0001"+ - "\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001"+ - "\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a0\u0001"+ - "\u0000\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02b6\u0005"+ - "\"\u0000\u0000\u02a1\u02a2\u0005\"\u0000\u0000\u02a2\u02a3\u0005\"\u0000"+ - "\u0000\u02a3\u02a4\u0005\"\u0000\u0000\u02a4\u02a8\u0001\u0000\u0000\u0000"+ - 
"\u02a5\u02a7\b\u0016\u0000\u0000\u02a6\u02a5\u0001\u0000\u0000\u0000\u02a7"+ - "\u02aa\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000\u0000\u02a8"+ - "\u02a6\u0001\u0000\u0000\u0000\u02a9\u02ab\u0001\u0000\u0000\u0000\u02aa"+ - "\u02a8\u0001\u0000\u0000\u0000\u02ab\u02ac\u0005\"\u0000\u0000\u02ac\u02ad"+ - "\u0005\"\u0000\u0000\u02ad\u02ae\u0005\"\u0000\u0000\u02ae\u02b0\u0001"+ - "\u0000\u0000\u0000\u02af\u02b1\u0005\"\u0000\u0000\u02b0\u02af\u0001\u0000"+ - "\u0000\u0000\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b3\u0001\u0000"+ - "\u0000\u0000\u02b2\u02b4\u0005\"\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000"+ - "\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b5\u0298\u0001\u0000\u0000\u0000\u02b5\u02a1\u0001\u0000\u0000"+ - "\u0000\u02b6V\u0001\u0000\u0000\u0000\u02b7\u02b9\u0003A\u0019\u0000\u02b8"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ - "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ - "X\u0001\u0000\u0000\u0000\u02bc\u02be\u0003A\u0019\u0000\u02bd\u02bc\u0001"+ - "\u0000\u0000\u0000\u02be\u02bf\u0001\u0000\u0000\u0000\u02bf\u02bd\u0001"+ - "\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001"+ - "\u0000\u0000\u0000\u02c1\u02c5\u0003i-\u0000\u02c2\u02c4\u0003A\u0019"+ - "\u0000\u02c3\u02c2\u0001\u0000\u0000\u0000\u02c4\u02c7\u0001\u0000\u0000"+ - "\u0000\u02c5\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000"+ - "\u0000\u02c6\u02e7\u0001\u0000\u0000\u0000\u02c7\u02c5\u0001\u0000\u0000"+ - "\u0000\u02c8\u02ca\u0003i-\u0000\u02c9\u02cb\u0003A\u0019\u0000\u02ca"+ - "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ - "\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd"+ - "\u02e7\u0001\u0000\u0000\u0000\u02ce\u02d0\u0003A\u0019\u0000\u02cf\u02ce"+ - "\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000\u02d1\u02cf"+ - "\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02da"+ - "\u0001\u0000\u0000\u0000\u02d3\u02d7\u0003i-\u0000\u02d4\u02d6\u0003A"+ - "\u0019\u0000\u02d5\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d9\u0001\u0000"+ - "\u0000\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ - "\u0000\u0000\u02d8\u02db\u0001\u0000\u0000\u0000\u02d9\u02d7\u0001\u0000"+ - "\u0000\u0000\u02da\u02d3\u0001\u0000\u0000\u0000\u02da\u02db\u0001\u0000"+ - "\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000\u02dc\u02dd\u0003I\u001d"+ - "\u0000\u02dd\u02e7\u0001\u0000\u0000\u0000\u02de\u02e0\u0003i-\u0000\u02df"+ - "\u02e1\u0003A\u0019\u0000\u02e0\u02df\u0001\u0000\u0000\u0000\u02e1\u02e2"+ - "\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0001\u0000\u0000\u0000\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e5"+ - "\u0003I\u001d\u0000\u02e5\u02e7\u0001\u0000\u0000\u0000\u02e6\u02bd\u0001"+ - "\u0000\u0000\u0000\u02e6\u02c8\u0001\u0000\u0000\u0000\u02e6\u02cf\u0001"+ - "\u0000\u0000\u0000\u02e6\u02de\u0001\u0000\u0000\u0000\u02e7Z\u0001\u0000"+ - "\u0000\u0000\u02e8\u02e9\u0007\u001e\u0000\u0000\u02e9\u02ea\u0007\u001f"+ - "\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000"+ - "\u0000\u02ec\u02ed\u0007\t\u0000\u0000\u02ed\u02ee\u0007\u0000\u0000\u0000"+ - "\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0007\f\u0000\u0000\u02f0"+ - "\u02f1\u0007\u0002\u0000\u0000\u02f1\u02f2\u0007\u0004\u0000\u0000\u02f2"+ - "`\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4b\u0001"+ - 
"\u0000\u0000\u0000\u02f5\u02f6\u0005:\u0000\u0000\u02f6\u02f7\u0005:\u0000"+ - "\u0000\u02f7d\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005,\u0000\u0000\u02f9"+ - "f\u0001\u0000\u0000\u0000\u02fa\u02fb\u0007\u0000\u0000\u0000\u02fb\u02fc"+ - "\u0007\u0003\u0000\u0000\u02fc\u02fd\u0007\u0002\u0000\u0000\u02fd\u02fe"+ - "\u0007\u0004\u0000\u0000\u02feh\u0001\u0000\u0000\u0000\u02ff\u0300\u0005"+ - ".\u0000\u0000\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000"+ - "\u0000\u0302\u0303\u0007\f\u0000\u0000\u0303\u0304\u0007\r\u0000\u0000"+ - "\u0304\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0003\u0000\u0000"+ - "\u0306l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u000f\u0000\u0000\u0308"+ - "\u0309\u0007\u0001\u0000\u0000\u0309\u030a\u0007\u0006\u0000\u0000\u030a"+ - "\u030b\u0007\u0002\u0000\u0000\u030b\u030c\u0007\u0005\u0000\u0000\u030c"+ - "n\u0001\u0000\u0000\u0000\u030d\u030e\u0007\u0001\u0000\u0000\u030e\u030f"+ - "\u0007\t\u0000\u0000\u030fp\u0001\u0000\u0000\u0000\u0310\u0311\u0007"+ - "\u0001\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312r\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0007\r\u0000\u0000\u0314\u0315\u0007\f\u0000"+ - "\u0000\u0315\u0316\u0007\u0002\u0000\u0000\u0316\u0317\u0007\u0005\u0000"+ - "\u0000\u0317t\u0001\u0000\u0000\u0000\u0318\u0319\u0007\r\u0000\u0000"+ - "\u0319\u031a\u0007\u0001\u0000\u0000\u031a\u031b\u0007\u0012\u0000\u0000"+ - "\u031b\u031c\u0007\u0003\u0000\u0000\u031cv\u0001\u0000\u0000\u0000\u031d"+ - "\u031e\u0005(\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007"+ - "\t\u0000\u0000\u0320\u0321\u0007\u0007\u0000\u0000\u0321\u0322\u0007\u0005"+ - "\u0000\u0000\u0322z\u0001\u0000\u0000\u0000\u0323\u0324\u0007\t\u0000"+ - "\u0000\u0324\u0325\u0007\u0014\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000"+ - "\u0326\u0327\u0007\r\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328"+ - "\u0329\u0007\t\u0000\u0000\u0329\u032a\u0007\u0014\u0000\u0000\u032a\u032b"+ - "\u0007\r\u0000\u0000\u032b\u032c\u0007\r\u0000\u0000\u032c\u032d\u0007"+ - "\u0002\u0000\u0000\u032d~\u0001\u0000\u0000\u0000\u032e\u032f\u0007\u0007"+ - "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0080\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0005?\u0000\u0000\u0332\u0082\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0007\u0006\u0000\u0000\u0334\u0335\u0007\r\u0000\u0000"+ - "\u0335\u0336\u0007\u0001\u0000\u0000\u0336\u0337\u0007\u0012\u0000\u0000"+ - "\u0337\u0338\u0007\u0003\u0000\u0000\u0338\u0084\u0001\u0000\u0000\u0000"+ - "\u0339\u033a\u0005)\u0000\u0000\u033a\u0086\u0001\u0000\u0000\u0000\u033b"+ - "\u033c\u0007\u0005\u0000\u0000\u033c\u033d\u0007\u0006\u0000\u0000\u033d"+ - "\u033e\u0007\u0014\u0000\u0000\u033e\u033f\u0007\u0003\u0000\u0000\u033f"+ - "\u0088\u0001\u0000\u0000\u0000\u0340\u0341\u0005=\u0000\u0000\u0341\u0342"+ - "\u0005=\u0000\u0000\u0342\u008a\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ - "=\u0000\u0000\u0344\u0345\u0005~\u0000\u0000\u0345\u008c\u0001\u0000\u0000"+ - "\u0000\u0346\u0347\u0005!\u0000\u0000\u0347\u0348\u0005=\u0000\u0000\u0348"+ - "\u008e\u0001\u0000\u0000\u0000\u0349\u034a\u0005<\u0000\u0000\u034a\u0090"+ - "\u0001\u0000\u0000\u0000\u034b\u034c\u0005<\u0000\u0000\u034c\u034d\u0005"+ - "=\u0000\u0000\u034d\u0092\u0001\u0000\u0000\u0000\u034e\u034f\u0005>\u0000"+ - "\u0000\u034f\u0094\u0001\u0000\u0000\u0000\u0350\u0351\u0005>\u0000\u0000"+ - "\u0351\u0352\u0005=\u0000\u0000\u0352\u0096\u0001\u0000\u0000\u0000\u0353"+ - "\u0354\u0005+\u0000\u0000\u0354\u0098\u0001\u0000\u0000\u0000\u0355\u0356"+ - 
"\u0005-\u0000\u0000\u0356\u009a\u0001\u0000\u0000\u0000\u0357\u0358\u0005"+ - "*\u0000\u0000\u0358\u009c\u0001\u0000\u0000\u0000\u0359\u035a\u0005/\u0000"+ - "\u0000\u035a\u009e\u0001\u0000\u0000\u0000\u035b\u035c\u0005%\u0000\u0000"+ - "\u035c\u00a0\u0001\u0000\u0000\u0000\u035d\u035e\u0004I\u0003\u0000\u035e"+ - "\u035f\u0003=\u0017\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u0361"+ - "\u0006I\f\u0000\u0361\u00a2\u0001\u0000\u0000\u0000\u0362\u0363\u0003"+ - "-\u000f\u0000\u0363\u0364\u0001\u0000\u0000\u0000\u0364\u0365\u0006J\r"+ - "\u0000\u0365\u00a4\u0001\u0000\u0000\u0000\u0366\u0369\u0003\u00819\u0000"+ - "\u0367\u036a\u0003C\u001a\u0000\u0368\u036a\u0003Q!\u0000\u0369\u0367"+ - "\u0001\u0000\u0000\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036e"+ - "\u0001\u0000\u0000\u0000\u036b\u036d\u0003S\"\u0000\u036c\u036b\u0001"+ - "\u0000\u0000\u0000\u036d\u0370\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ - "\u0000\u0000\u0000\u036e\u036f\u0001\u0000\u0000\u0000\u036f\u0378\u0001"+ - "\u0000\u0000\u0000\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u0373\u0003"+ - "\u00819\u0000\u0372\u0374\u0003A\u0019\u0000\u0373\u0372\u0001\u0000\u0000"+ - "\u0000\u0374\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000"+ - "\u0000\u0375\u0376\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000"+ - "\u0000\u0377\u0366\u0001\u0000\u0000\u0000\u0377\u0371\u0001\u0000\u0000"+ - "\u0000\u0378\u00a6\u0001\u0000\u0000\u0000\u0379\u037a\u0005[\u0000\u0000"+ - "\u037a\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006L\u0000\u0000\u037c"+ - "\u037d\u0006L\u0000\u0000\u037d\u00a8\u0001\u0000\u0000\u0000\u037e\u037f"+ - "\u0005]\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380\u0381\u0006"+ - "M\u000b\u0000\u0381\u0382\u0006M\u000b\u0000\u0382\u00aa\u0001\u0000\u0000"+ - "\u0000\u0383\u0387\u0003C\u001a\u0000\u0384\u0386\u0003S\"\u0000\u0385"+ - "\u0384\u0001\u0000\u0000\u0000\u0386\u0389\u0001\u0000\u0000\u0000\u0387"+ - "\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388"+ - "\u0394\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000\u0000\u038a"+ - "\u038d\u0003Q!\u0000\u038b\u038d\u0003K\u001e\u0000\u038c\u038a\u0001"+ - "\u0000\u0000\u0000\u038c\u038b\u0001\u0000\u0000\u0000\u038d\u038f\u0001"+ - "\u0000\u0000\u0000\u038e\u0390\u0003S\"\u0000\u038f\u038e\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000"+ - "\u0000\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392\u0394\u0001\u0000"+ - "\u0000\u0000\u0393\u0383\u0001\u0000\u0000\u0000\u0393\u038c\u0001\u0000"+ - "\u0000\u0000\u0394\u00ac\u0001\u0000\u0000\u0000\u0395\u0397\u0003M\u001f"+ - "\u0000\u0396\u0398\u0003O \u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398"+ - "\u0399\u0001\u0000\u0000\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399"+ - "\u039a\u0001\u0000\u0000\u0000\u039a\u039b\u0001\u0000\u0000\u0000\u039b"+ - "\u039c\u0003M\u001f\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e"+ - "\u0003\u00adO\u0000\u039e\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ - "7\u0014\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n"+ - "\u0000\u03a2\u00b2\u0001\u0000\u0000\u0000\u03a3\u03a4\u00039\u0015\u0000"+ - "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006R\n\u0000\u03a6"+ - "\u00b4\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003;\u0016\u0000\u03a8\u03a9"+ - "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006S\n\u0000\u03aa\u00b6\u0001"+ - "\u0000\u0000\u0000\u03ab\u03ac\u0003\u00a7L\u0000\u03ac\u03ad\u0001\u0000"+ - 
"\u0000\u0000\u03ad\u03ae\u0006T\u000e\u0000\u03ae\u03af\u0006T\u000f\u0000"+ - "\u03af\u00b8\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003?\u0018\u0000\u03b1"+ - "\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006U\u0010\u0000\u03b3\u03b4"+ - "\u0006U\u000b\u0000\u03b4\u00ba\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003"+ - ";\u0016\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006V\n"+ - "\u0000\u03b8\u00bc\u0001\u0000\u0000\u0000\u03b9\u03ba\u00037\u0014\u0000"+ - "\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc\u0006W\n\u0000\u03bc"+ - "\u00be\u0001\u0000\u0000\u0000\u03bd\u03be\u00039\u0015\u0000\u03be\u03bf"+ - "\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006X\n\u0000\u03c0\u00c0\u0001"+ - "\u0000\u0000\u0000\u03c1\u03c2\u0003?\u0018\u0000\u03c2\u03c3\u0001\u0000"+ - "\u0000\u0000\u03c3\u03c4\u0006Y\u0010\u0000\u03c4\u03c5\u0006Y\u000b\u0000"+ - "\u03c5\u00c2\u0001\u0000\u0000\u0000\u03c6\u03c7\u0003\u00a7L\u0000\u03c7"+ - "\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9\u0006Z\u000e\u0000\u03c9\u00c4"+ - "\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003\u00a9M\u0000\u03cb\u03cc\u0001"+ - "\u0000\u0000\u0000\u03cc\u03cd\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000"+ - "\u0000\u0000\u03ce\u03cf\u0003=\u0017\u0000\u03cf\u03d0\u0001\u0000\u0000"+ - "\u0000\u03d0\u03d1\u0006\\\f\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d3\u0003e+\u0000\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5"+ - "\u0006]\u0012\u0000\u03d5\u00ca\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003"+ - "a)\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006^\u0013"+ - "\u0000\u03d9\u00cc\u0001\u0000\u0000\u0000\u03da\u03db\u0007\u0010\u0000"+ - "\u0000\u03db\u03dc\u0007\u0003\u0000\u0000\u03dc\u03dd\u0007\u0005\u0000"+ - "\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0000\u0000\u0000"+ - "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u03e1\u0007\u0005\u0000\u0000\u03e1"+ - "\u03e2\u0007\f\u0000\u0000\u03e2\u00ce\u0001\u0000\u0000\u0000\u03e3\u03e7"+ - "\b \u0000\u0000\u03e4\u03e5\u0005/\u0000\u0000\u03e5\u03e7\b!\u0000\u0000"+ - "\u03e6\u03e3\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001\u0000\u0000\u0000"+ - "\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03ea\u0003\u00cf`\u0000\u03e9"+ - "\u03e8\u0001\u0000\u0000\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb"+ - "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ - "\u00d2\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00d1a\u0000\u03ee\u03ef"+ - "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0006b\u0014\u0000\u03f0\u00d4\u0001"+ - "\u0000\u0000\u0000\u03f1\u03f2\u0003U#\u0000\u03f2\u03f3\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0006c\u0015\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000"+ - "\u03f5\u03f6\u00037\u0014\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ - "\u03f8\u0006d\n\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u00039\u0015\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ - "e\n\u0000\u03fc\u00da\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003;\u0016"+ - "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006f\n\u0000"+ - "\u0400\u00dc\u0001\u0000\u0000\u0000\u0401\u0402\u0003?\u0018\u0000\u0402"+ - "\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006g\u0010\u0000\u0404\u0405"+ - "\u0006g\u000b\u0000\u0405\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ - "i-\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016"+ - "\u0000\u0409\u00e0\u0001\u0000\u0000\u0000\u040a\u040b\u0003e+\u0000\u040b"+ - "\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006i\u0012\u0000\u040d\u00e2"+ - 
"\u0001\u0000\u0000\u0000\u040e\u040f\u0004j\u0004\u0000\u040f\u0410\u0003"+ - "\u00819\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0017"+ - "\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0414\u0004k\u0005\u0000"+ - "\u0414\u0415\u0003\u00a5K\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416"+ - "\u0417\u0006k\u0018\u0000\u0417\u00e6\u0001\u0000\u0000\u0000\u0418\u041d"+ - "\u0003C\u001a\u0000\u0419\u041d\u0003A\u0019\u0000\u041a\u041d\u0003Q"+ - "!\u0000\u041b\u041d\u0003\u009bF\u0000\u041c\u0418\u0001\u0000\u0000\u0000"+ - "\u041c\u0419\u0001\u0000\u0000\u0000\u041c\u041a\u0001\u0000\u0000\u0000"+ - "\u041c\u041b\u0001\u0000\u0000\u0000\u041d\u00e8\u0001\u0000\u0000\u0000"+ - "\u041e\u0421\u0003C\u001a\u0000\u041f\u0421\u0003\u009bF\u0000\u0420\u041e"+ - "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0425"+ - "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e7l\u0000\u0423\u0422\u0001"+ - "\u0000\u0000\u0000\u0424\u0427\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ - "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0432\u0001"+ - "\u0000\u0000\u0000\u0427\u0425\u0001\u0000\u0000\u0000\u0428\u042b\u0003"+ - "Q!\u0000\u0429\u042b\u0003K\u001e\u0000\u042a\u0428\u0001\u0000\u0000"+ - "\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b\u042d\u0001\u0000\u0000"+ - "\u0000\u042c\u042e\u0003\u00e7l\u0000\u042d\u042c\u0001\u0000\u0000\u0000"+ - "\u042e\u042f\u0001\u0000\u0000\u0000\u042f\u042d\u0001\u0000\u0000\u0000"+ - "\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0432\u0001\u0000\u0000\u0000"+ - "\u0431\u0420\u0001\u0000\u0000\u0000\u0431\u042a\u0001\u0000\u0000\u0000"+ - "\u0432\u00ea\u0001\u0000\u0000\u0000\u0433\u0436\u0003\u00e9m\u0000\u0434"+ - "\u0436\u0003\u00adO\u0000\u0435\u0433\u0001\u0000\u0000\u0000\u0435\u0434"+ - "\u0001\u0000\u0000\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0435"+ - "\u0001\u0000\u0000\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u00ec"+ - "\u0001\u0000\u0000\u0000\u0439\u043a\u00037\u0014\u0000\u043a\u043b\u0001"+ - "\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00039\u0015\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u0003;\u0016\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006q\n\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u0003?\u0018\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "r\u0010\u0000\u0448\u0449\u0006r\u000b\u0000\u0449\u00f4\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0003a)\u0000\u044b\u044c\u0001\u0000\u0000\u0000\u044c"+ - "\u044d\u0006s\u0013\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e\u044f"+ - "\u0003e+\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006t"+ - "\u0012\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003i-\u0000"+ - "\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0016\u0000\u0455"+ - "\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0004v\u0006\u0000\u0457\u0458"+ - "\u0003\u00819\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006"+ - "v\u0017\u0000\u045a\u00fc\u0001\u0000\u0000\u0000\u045b\u045c\u0004w\u0007"+ - "\u0000\u045c\u045d\u0003\u00a5K\u0000\u045d\u045e\u0001\u0000\u0000\u0000"+ - "\u045e\u045f\u0006w\u0018\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460"+ - "\u0461\u0007\f\u0000\u0000\u0461\u0462\u0007\u0002\u0000\u0000\u0462\u0100"+ - "\u0001\u0000\u0000\u0000\u0463\u0464\u0003\u00ebn\u0000\u0464\u0465\u0001"+ - 
"\u0000\u0000\u0000\u0465\u0466\u0006y\u0019\u0000\u0466\u0102\u0001\u0000"+ - "\u0000\u0000\u0467\u0468\u00037\u0014\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006z\n\u0000\u046a\u0104\u0001\u0000\u0000\u0000"+ - "\u046b\u046c\u00039\u0015\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d"+ - "\u046e\u0006{\n\u0000\u046e\u0106\u0001\u0000\u0000\u0000\u046f\u0470"+ - "\u0003;\u0016\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006"+ - "|\n\u0000\u0472\u0108\u0001\u0000\u0000\u0000\u0473\u0474\u0003?\u0018"+ - "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006}\u0010\u0000"+ - "\u0476\u0477\u0006}\u000b\u0000\u0477\u010a\u0001\u0000\u0000\u0000\u0478"+ - "\u0479\u0003\u00a7L\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b"+ - "\u0006~\u000e\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001\u0000"+ - "\u0000\u0000\u047d\u047e\u0007\u0007\u0000\u0000\u047e\u047f\u0007\t\u0000"+ - "\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b"+ - "\u0000\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\u0007\u0013\u0000"+ - "\u0000\u0483\u0484\u0007\u0001\u0000\u0000\u0484\u0485\u0007\u0005\u0000"+ - "\u0000\u0485\u0486\u0007\n\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000"+ - "\u0487\u0488\u0006\u0080\u001b\u0000\u0488\u0110\u0001\u0000\u0000\u0000"+ - "\u0489\u048a\b\"\u0000\u0000\u048a\u0112\u0001\u0000\u0000\u0000\u048b"+ - "\u048d\u0003\u0111\u0081\u0000\u048c\u048b\u0001\u0000\u0000\u0000\u048d"+ - "\u048e\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000\u048e"+ - "\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000\u0490"+ - "\u0491\u0003=\u0017\u0000\u0491\u0493\u0001\u0000\u0000\u0000\u0492\u048c"+ - "\u0001\u0000\u0000\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0495"+ - "\u0001\u0000\u0000\u0000\u0494\u0496\u0003\u0111\u0081\u0000\u0495\u0494"+ - "\u0001\u0000\u0000\u0000\u0496\u0497\u0001\u0000\u0000\u0000\u0497\u0495"+ - "\u0001\u0000\u0000\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0114"+ - "\u0001\u0000\u0000\u0000\u0499\u049a\u0003\u0113\u0082\u0000\u049a\u049b"+ - "\u0001\u0000\u0000\u0000\u049b\u049c\u0006\u0083\u001c\u0000\u049c\u0116"+ - "\u0001\u0000\u0000\u0000\u049d\u049e\u00037\u0014\u0000\u049e\u049f\u0001"+ - "\u0000\u0000\u0000\u049f\u04a0\u0006\u0084\n\u0000\u04a0\u0118\u0001\u0000"+ - "\u0000\u0000\u04a1\u04a2\u00039\u0015\u0000\u04a2\u04a3\u0001\u0000\u0000"+ - "\u0000\u04a3\u04a4\u0006\u0085\n\u0000\u04a4\u011a\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0003;\u0016\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0006\u0086\n\u0000\u04a8\u011c\u0001\u0000\u0000\u0000\u04a9\u04aa"+ - "\u0003?\u0018\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac\u0006"+ - "\u0087\u0010\u0000\u04ac\u04ad\u0006\u0087\u000b\u0000\u04ad\u04ae\u0006"+ - "\u0087\u000b\u0000\u04ae\u011e\u0001\u0000\u0000\u0000\u04af\u04b0\u0003"+ - "a)\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0088\u0013"+ - "\u0000\u04b2\u0120\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003e+\u0000\u04b4"+ - "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u0089\u0012\u0000\u04b6"+ - "\u0122\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003i-\u0000\u04b8\u04b9\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0006\u008a\u0016\u0000\u04ba\u0124\u0001"+ - "\u0000\u0000\u0000\u04bb\u04bc\u0003\u010f\u0080\u0000\u04bc\u04bd\u0001"+ - "\u0000\u0000\u0000\u04bd\u04be\u0006\u008b\u001d\u0000\u04be\u0126\u0001"+ - "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ebn\u0000\u04c0\u04c1\u0001\u0000"+ - 
"\u0000\u0000\u04c1\u04c2\u0006\u008c\u0019\u0000\u04c2\u0128\u0001\u0000"+ - "\u0000\u0000\u04c3\u04c4\u0003\u00afP\u0000\u04c4\u04c5\u0001\u0000\u0000"+ - "\u0000\u04c5\u04c6\u0006\u008d\u001e\u0000\u04c6\u012a\u0001\u0000\u0000"+ - "\u0000\u04c7\u04c8\u0004\u008e\b\u0000\u04c8\u04c9\u0003\u00819\u0000"+ - "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008e\u0017\u0000"+ - "\u04cb\u012c\u0001\u0000\u0000\u0000\u04cc\u04cd\u0004\u008f\t\u0000\u04cd"+ - "\u04ce\u0003\u00a5K\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf\u04d0"+ - "\u0006\u008f\u0018\u0000\u04d0\u012e\u0001\u0000\u0000\u0000\u04d1\u04d2"+ - "\u00037\u0014\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ - "\u0090\n\u0000\u04d4\u0130\u0001\u0000\u0000\u0000\u04d5\u04d6\u00039"+ - "\u0015\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0091"+ - "\n\u0000\u04d8\u0132\u0001\u0000\u0000\u0000\u04d9\u04da\u0003;\u0016"+ - "\u0000\u04da\u04db\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0092\n\u0000"+ - "\u04dc\u0134\u0001\u0000\u0000\u0000\u04dd\u04de\u0003?\u0018\u0000\u04de"+ - "\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u0010\u0000\u04e0"+ - "\u04e1\u0006\u0093\u000b\u0000\u04e1\u0136\u0001\u0000\u0000\u0000\u04e2"+ - "\u04e3\u0003i-\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006"+ - "\u0094\u0016\u0000\u04e5\u0138\u0001\u0000\u0000\u0000\u04e6\u04e7\u0004"+ - "\u0095\n\u0000\u04e7\u04e8\u0003\u00819\u0000\u04e8\u04e9\u0001\u0000"+ - "\u0000\u0000\u04e9\u04ea\u0006\u0095\u0017\u0000\u04ea\u013a\u0001\u0000"+ - "\u0000\u0000\u04eb\u04ec\u0004\u0096\u000b\u0000\u04ec\u04ed\u0003\u00a5"+ - "K\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0096\u0018"+ - "\u0000\u04ef\u013c\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003\u00afP\u0000"+ - "\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u0097\u001e\u0000"+ - "\u04f3\u013e\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003\u00abN\u0000\u04f5"+ - "\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u0098\u001f\u0000\u04f7"+ - "\u0140\u0001\u0000\u0000\u0000\u04f8\u04f9\u00037\u0014\u0000\u04f9\u04fa"+ - "\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u0099\n\u0000\u04fb\u0142\u0001"+ - "\u0000\u0000\u0000\u04fc\u04fd\u00039\u0015\u0000\u04fd\u04fe\u0001\u0000"+ - "\u0000\u0000\u04fe\u04ff\u0006\u009a\n\u0000\u04ff\u0144\u0001\u0000\u0000"+ - "\u0000\u0500\u0501\u0003;\u0016\u0000\u0501\u0502\u0001\u0000\u0000\u0000"+ - "\u0502\u0503\u0006\u009b\n\u0000\u0503\u0146\u0001\u0000\u0000\u0000\u0504"+ - "\u0505\u0003?\u0018\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507"+ - "\u0006\u009c\u0010\u0000\u0507\u0508\u0006\u009c\u000b\u0000\u0508\u0148"+ - "\u0001\u0000\u0000\u0000\u0509\u050a\u0007\u0001\u0000\u0000\u050a\u050b"+ - "\u0007\t\u0000\u0000\u050b\u050c\u0007\u000f\u0000\u0000\u050c\u050d\u0007"+ - "\u0007\u0000\u0000\u050d\u014a\u0001\u0000\u0000\u0000\u050e\u050f\u0003"+ - "7\u0014\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511\u0006\u009e"+ - "\n\u0000\u0511\u014c\u0001\u0000\u0000\u0000\u0512\u0513\u00039\u0015"+ - "\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u009f\n\u0000"+ - "\u0515\u014e\u0001\u0000\u0000\u0000\u0516\u0517\u0003;\u0016\u0000\u0517"+ - "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00a0\n\u0000\u0519\u0150"+ - "\u0001\u0000\u0000\u0000\u051a\u051b\u0003\u00a9M\u0000\u051b\u051c\u0001"+ - "\u0000\u0000\u0000\u051c\u051d\u0006\u00a1\u0011\u0000\u051d\u051e\u0006"+ - "\u00a1\u000b\u0000\u051e\u0152\u0001\u0000\u0000\u0000\u051f\u0520\u0003"+ - 
"=\u0017\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a2"+ - "\f\u0000\u0522\u0154\u0001\u0000\u0000\u0000\u0523\u0529\u0003K\u001e"+ - "\u0000\u0524\u0529\u0003A\u0019\u0000\u0525\u0529\u0003i-\u0000\u0526"+ - "\u0529\u0003C\u001a\u0000\u0527\u0529\u0003Q!\u0000\u0528\u0523\u0001"+ - "\u0000\u0000\u0000\u0528\u0524\u0001\u0000\u0000\u0000\u0528\u0525\u0001"+ - "\u0000\u0000\u0000\u0528\u0526\u0001\u0000\u0000\u0000\u0528\u0527\u0001"+ - "\u0000\u0000\u0000\u0529\u052a\u0001\u0000\u0000\u0000\u052a\u0528\u0001"+ - "\u0000\u0000\u0000\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u0156\u0001"+ - "\u0000\u0000\u0000\u052c\u052d\u00037\u0014\u0000\u052d\u052e\u0001\u0000"+ - "\u0000\u0000\u052e\u052f\u0006\u00a4\n\u0000\u052f\u0158\u0001\u0000\u0000"+ - "\u0000\u0530\u0531\u00039\u0015\u0000\u0531\u0532\u0001\u0000\u0000\u0000"+ - "\u0532\u0533\u0006\u00a5\n\u0000\u0533\u015a\u0001\u0000\u0000\u0000\u0534"+ - "\u0535\u0003;\u0016\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537"+ - "\u0006\u00a6\n\u0000\u0537\u015c\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ - "?\u0018\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7"+ - "\u0010\u0000\u053b\u053c\u0006\u00a7\u000b\u0000\u053c\u015e\u0001\u0000"+ - "\u0000\u0000\u053d\u053e\u0003=\u0017\u0000\u053e\u053f\u0001\u0000\u0000"+ - "\u0000\u053f\u0540\u0006\u00a8\f\u0000\u0540\u0160\u0001\u0000\u0000\u0000"+ - "\u0541\u0542\u0003e+\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544"+ - "\u0006\u00a9\u0012\u0000\u0544\u0162\u0001\u0000\u0000\u0000\u0545\u0546"+ - "\u0003i-\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00aa"+ - "\u0016\u0000\u0548\u0164\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u010d"+ - "\u007f\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ab"+ - " \u0000\u054c\u054d\u0006\u00ab!\u0000\u054d\u0166\u0001\u0000\u0000\u0000"+ - "\u054e\u054f\u0003\u00d1a\u0000\u054f\u0550\u0001\u0000\u0000\u0000\u0550"+ - "\u0551\u0006\u00ac\u0014\u0000\u0551\u0168\u0001\u0000\u0000\u0000\u0552"+ - "\u0553\u0003U#\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006"+ - "\u00ad\u0015\u0000\u0555\u016a\u0001\u0000\u0000\u0000\u0556\u0557\u0003"+ - "7\u0014\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00ae"+ - "\n\u0000\u0559\u016c\u0001\u0000\u0000\u0000\u055a\u055b\u00039\u0015"+ - "\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00af\n\u0000"+ - "\u055d\u016e\u0001\u0000\u0000\u0000\u055e\u055f\u0003;\u0016\u0000\u055f"+ - "\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00b0\n\u0000\u0561\u0170"+ - "\u0001\u0000\u0000\u0000\u0562\u0563\u0003?\u0018\u0000\u0563\u0564\u0001"+ - "\u0000\u0000\u0000\u0564\u0565\u0006\u00b1\u0010\u0000\u0565\u0566\u0006"+ - "\u00b1\u000b\u0000\u0566\u0567\u0006\u00b1\u000b\u0000\u0567\u0172\u0001"+ - "\u0000\u0000\u0000\u0568\u0569\u0003e+\u0000\u0569\u056a\u0001\u0000\u0000"+ - "\u0000\u056a\u056b\u0006\u00b2\u0012\u0000\u056b\u0174\u0001\u0000\u0000"+ - "\u0000\u056c\u056d\u0003i-\u0000\u056d\u056e\u0001\u0000\u0000\u0000\u056e"+ - "\u056f\u0006\u00b3\u0016\u0000\u056f\u0176\u0001\u0000\u0000\u0000\u0570"+ - "\u0571\u0003\u00ebn\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572\u0573"+ - "\u0006\u00b4\u0019\u0000\u0573\u0178\u0001\u0000\u0000\u0000\u0574\u0575"+ - "\u00037\u0014\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ - "\u00b5\n\u0000\u0577\u017a\u0001\u0000\u0000\u0000\u0578\u0579\u00039"+ - "\u0015\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b6"+ - 
"\n\u0000\u057b\u017c\u0001\u0000\u0000\u0000\u057c\u057d\u0003;\u0016"+ - "\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00b7\n\u0000"+ - "\u057f\u017e\u0001\u0000\u0000\u0000\u0580\u0581\u0003?\u0018\u0000\u0581"+ - "\u0582\u0001\u0000\u0000\u0000\u0582\u0583\u0006\u00b8\u0010\u0000\u0583"+ - "\u0584\u0006\u00b8\u000b\u0000\u0584\u0180\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u0003\u00d1a\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ - "\u0006\u00b9\u0014\u0000\u0588\u0589\u0006\u00b9\u000b\u0000\u0589\u058a"+ - "\u0006\u00b9\"\u0000\u058a\u0182\u0001\u0000\u0000\u0000\u058b\u058c\u0003"+ - "U#\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00ba\u0015"+ - "\u0000\u058e\u058f\u0006\u00ba\u000b\u0000\u058f\u0590\u0006\u00ba\"\u0000"+ - "\u0590\u0184\u0001\u0000\u0000\u0000\u0591\u0592\u00037\u0014\u0000\u0592"+ - "\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bb\n\u0000\u0594\u0186"+ - "\u0001\u0000\u0000\u0000\u0595\u0596\u00039\u0015\u0000\u0596\u0597\u0001"+ - "\u0000\u0000\u0000\u0597\u0598\u0006\u00bc\n\u0000\u0598\u0188\u0001\u0000"+ - "\u0000\u0000\u0599\u059a\u0003;\u0016\u0000\u059a\u059b\u0001\u0000\u0000"+ - "\u0000\u059b\u059c\u0006\u00bd\n\u0000\u059c\u018a\u0001\u0000\u0000\u0000"+ - "\u059d\u059e\u0003=\u0017\u0000\u059e\u059f\u0001\u0000\u0000\u0000\u059f"+ - "\u05a0\u0006\u00be\f\u0000\u05a0\u05a1\u0006\u00be\u000b\u0000\u05a1\u05a2"+ - "\u0006\u00be\t\u0000\u05a2\u018c\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003"+ - "e+\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00bf\u0012"+ - "\u0000\u05a6\u05a7\u0006\u00bf\u000b\u0000\u05a7\u05a8\u0006\u00bf\t\u0000"+ - "\u05a8\u018e\u0001\u0000\u0000\u0000\u05a9\u05aa\u00037\u0014\u0000\u05aa"+ - "\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c0\n\u0000\u05ac\u0190"+ - "\u0001\u0000\u0000\u0000\u05ad\u05ae\u00039\u0015\u0000\u05ae\u05af\u0001"+ - "\u0000\u0000\u0000\u05af\u05b0\u0006\u00c1\n\u0000\u05b0\u0192\u0001\u0000"+ - "\u0000\u0000\u05b1\u05b2\u0003;\u0016\u0000\u05b2\u05b3\u0001\u0000\u0000"+ - "\u0000\u05b3\u05b4\u0006\u00c2\n\u0000\u05b4\u0194\u0001\u0000\u0000\u0000"+ - "\u05b5\u05b6\u0003\u00afP\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7"+ - "\u05b8\u0006\u00c3\u000b\u0000\u05b8\u05b9\u0006\u00c3\u0000\u0000\u05b9"+ - "\u05ba\u0006\u00c3\u001e\u0000\u05ba\u0196\u0001\u0000\u0000\u0000\u05bb"+ - "\u05bc\u0003\u00abN\u0000\u05bc\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be"+ - "\u0006\u00c4\u000b\u0000\u05be\u05bf\u0006\u00c4\u0000\u0000\u05bf\u05c0"+ - "\u0006\u00c4\u001f\u0000\u05c0\u0198\u0001\u0000\u0000\u0000\u05c1\u05c2"+ - "\u0003[&\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00c5"+ - "\u000b\u0000\u05c4\u05c5\u0006\u00c5\u0000\u0000\u05c5\u05c6\u0006\u00c5"+ - "#\u0000\u05c6\u019a\u0001\u0000\u0000\u0000\u05c7\u05c8\u0003?\u0018\u0000"+ - "\u05c8\u05c9\u0001\u0000\u0000\u0000\u05c9\u05ca\u0006\u00c6\u0010\u0000"+ - "\u05ca\u05cb\u0006\u00c6\u000b\u0000\u05cb\u019c\u0001\u0000\u0000\u0000"+ - "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u0245\u024f\u0253\u0256\u025f\u0261\u026c\u0281\u0286\u028f\u0296\u029b"+ - "\u029d\u02a8\u02b0\u02b3\u02b5\u02ba\u02bf\u02c5\u02cc\u02d1\u02d7\u02da"+ - "\u02e2\u02e6\u0369\u036e\u0375\u0377\u0387\u038c\u0391\u0393\u0399\u03e6"+ - "\u03eb\u041c\u0420\u0425\u042a\u042f\u0431\u0435\u0437\u048e\u0492\u0497"+ - "\u0528\u052a$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ - "\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ - 
"\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ - "\u0007\u0018\u0000\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007"+ - "\u0019\u0000\u0007B\u0000\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007"+ - "\u001a\u0000\u0007$\u0000\u00070\u0000\u0007@\u0000\u0007P\u0000\u0005"+ - "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ - "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001d\u0000"; + "\u00a7\u0004\u00a7\u056e\b\u00a7\u000b\u00a7\f\u00a7\u056f\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7"+ + "\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0002\u02ab\u02f0\u0000\u00d5\u0010"+ + "\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c"+ + "\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u0012"+ + "4\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001c"+ + 
"H\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000"+ + "\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ + "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ + ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac"+ + "\u0000\u00aeE\u00b0F\u00b2G\u00b4H\u00b6\u0000\u00b8I\u00baJ\u00bcK\u00be"+ + "L\u00c0\u0000\u00c2\u0000\u00c4M\u00c6N\u00c8O\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6P\u00d8\u0000\u00da"+ + "Q\u00dc\u0000\u00de\u0000\u00e0R\u00e2S\u00e4T\u00e6\u0000\u00e8\u0000"+ + "\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4U\u00f6"+ + "V\u00f8W\u00faX\u00fc\u0000\u00fe\u0000\u0100\u0000\u0102\u0000\u0104"+ + "\u0000\u0106\u0000\u0108Y\u010a\u0000\u010cZ\u010e[\u0110\\\u0112\u0000"+ + "\u0114\u0000\u0116]\u0118^\u011a\u0000\u011c_\u011e\u0000\u0120`\u0122"+ + "a\u0124b\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ + "\u0000\u0132\u0000\u0134\u0000\u0136\u0000\u0138c\u013ad\u013ce\u013e"+ + "\u0000\u0140\u0000\u0142\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a"+ + "f\u014cg\u014eh\u0150\u0000\u0152i\u0154j\u0156k\u0158l\u015a\u0000\u015c"+ + "\u0000\u015em\u0160n\u0162o\u0164p\u0166\u0000\u0168\u0000\u016a\u0000"+ + "\u016c\u0000\u016e\u0000\u0170\u0000\u0172\u0000\u0174q\u0176r\u0178s"+ + "\u017a\u0000\u017c\u0000\u017e\u0000\u0180\u0000\u0182t\u0184u\u0186v"+ + "\u0188\u0000\u018a\u0000\u018c\u0000\u018e\u0000\u0190w\u0192\u0000\u0194"+ + "\u0000\u0196x\u0198y\u019az\u019c\u0000\u019e\u0000\u01a0\u0000\u01a2"+ + "{\u01a4|\u01a6}\u01a8\u0000\u01aa\u0000\u01ac~\u01ae\u007f\u01b0\u0080"+ + "\u01b2\u0000\u01b4\u0000\u01b6\u0000\u01b8\u0000\u0010\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000"+ + "DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ + "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ + "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r"+ + "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ + "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ + "//::<<>?\\\\||\u065c\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"+ + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"+ + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"+ + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0001H"+ + 
"\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000"+ + "\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000"+ + "\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j"+ + "\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000"+ + "\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000"+ + "\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x"+ + "\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000"+ + "\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000"+ + "\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000"+ + "\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000"+ + "\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000"+ + "\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000"+ + "\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000"+ + "\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000"+ + "\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000"+ + "\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000"+ + "\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000"+ + "\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000"+ + "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ + "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ + "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ + "\u0000\u0001\u00b8\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000"+ + "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ + "\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000"+ + "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ + "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000"+ + "\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ + "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ + "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ + "\u0000\u0003\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000"+ + "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ + "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ + "\u0000\u0004\u00e6\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000"+ + "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ + "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f4\u0001\u0000\u0000"+ + "\u0000\u0004\u00f6\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ + "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000"+ + "\u0000\u0005\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ + "\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ + "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ + "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ + "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ + "\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000"+ + "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ + "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + 
"\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000"+ + "\u0000\u0007\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ + "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ + "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ + "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ + "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ + "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\b\u013e\u0001\u0000\u0000\u0000"+ + "\b\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144"+ + "\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001"+ + "\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000"+ + "\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\t\u0150\u0001\u0000\u0000"+ + "\u0000\t\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000"+ + "\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\n\u015a"+ + "\u0001\u0000\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001"+ + "\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000"+ + "\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\u000b\u0166\u0001\u0000\u0000"+ + "\u0000\u000b\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ + "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ + "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ + "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ + "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\f\u017a\u0001\u0000\u0000\u0000"+ + "\f\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180"+ + "\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001"+ + "\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\r\u0188\u0001\u0000"+ + "\u0000\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000"+ + "\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000"+ + "\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196"+ + "\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001"+ + "\u0000\u0000\u0000\u000e\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001"+ + "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ + "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ + "\u0000\u0000\u0000\u000f\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001"+ + "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ + "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ + "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ + "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u0010\u01ba\u0001"+ + "\u0000\u0000\u0000\u0012\u01c4\u0001\u0000\u0000\u0000\u0014\u01cb\u0001"+ + "\u0000\u0000\u0000\u0016\u01d4\u0001\u0000\u0000\u0000\u0018\u01db\u0001"+ + "\u0000\u0000\u0000\u001a\u01e5\u0001\u0000\u0000\u0000\u001c\u01ec\u0001"+ + "\u0000\u0000\u0000\u001e\u01f3\u0001\u0000\u0000\u0000 \u01fa\u0001\u0000"+ + "\u0000\u0000\"\u0202\u0001\u0000\u0000\u0000$\u020e\u0001\u0000\u0000"+ + "\u0000&\u0217\u0001\u0000\u0000\u0000(\u021d\u0001\u0000\u0000\u0000*"+ + "\u0224\u0001\u0000\u0000\u0000,\u022b\u0001\u0000\u0000\u0000.\u0233\u0001"+ + "\u0000\u0000\u00000\u023b\u0001\u0000\u0000\u00002\u024a\u0001\u0000\u0000"+ + 
"\u00004\u0256\u0001\u0000\u0000\u00006\u0261\u0001\u0000\u0000\u00008"+ + "\u0269\u0001\u0000\u0000\u0000:\u0271\u0001\u0000\u0000\u0000<\u0279\u0001"+ + "\u0000\u0000\u0000>\u0282\u0001\u0000\u0000\u0000@\u028d\u0001\u0000\u0000"+ + "\u0000B\u0293\u0001\u0000\u0000\u0000D\u02a4\u0001\u0000\u0000\u0000F"+ + "\u02b4\u0001\u0000\u0000\u0000H\u02ba\u0001\u0000\u0000\u0000J\u02be\u0001"+ + "\u0000\u0000\u0000L\u02c0\u0001\u0000\u0000\u0000N\u02c2\u0001\u0000\u0000"+ + "\u0000P\u02c5\u0001\u0000\u0000\u0000R\u02c7\u0001\u0000\u0000\u0000T"+ + "\u02d0\u0001\u0000\u0000\u0000V\u02d2\u0001\u0000\u0000\u0000X\u02d7\u0001"+ + "\u0000\u0000\u0000Z\u02d9\u0001\u0000\u0000\u0000\\\u02de\u0001\u0000"+ + "\u0000\u0000^\u02fd\u0001\u0000\u0000\u0000`\u0300\u0001\u0000\u0000\u0000"+ + "b\u032e\u0001\u0000\u0000\u0000d\u0330\u0001\u0000\u0000\u0000f\u0333"+ + "\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000j\u033b\u0001\u0000"+ + "\u0000\u0000l\u033d\u0001\u0000\u0000\u0000n\u0340\u0001\u0000\u0000\u0000"+ + "p\u0342\u0001\u0000\u0000\u0000r\u0344\u0001\u0000\u0000\u0000t\u0349"+ + "\u0001\u0000\u0000\u0000v\u034b\u0001\u0000\u0000\u0000x\u0351\u0001\u0000"+ + "\u0000\u0000z\u0357\u0001\u0000\u0000\u0000|\u035a\u0001\u0000\u0000\u0000"+ + "~\u035d\u0001\u0000\u0000\u0000\u0080\u0362\u0001\u0000\u0000\u0000\u0082"+ + "\u0367\u0001\u0000\u0000\u0000\u0084\u0369\u0001\u0000\u0000\u0000\u0086"+ + "\u036d\u0001\u0000\u0000\u0000\u0088\u0372\u0001\u0000\u0000\u0000\u008a"+ + "\u0378\u0001\u0000\u0000\u0000\u008c\u037b\u0001\u0000\u0000\u0000\u008e"+ + "\u037d\u0001\u0000\u0000\u0000\u0090\u0383\u0001\u0000\u0000\u0000\u0092"+ + "\u0385\u0001\u0000\u0000\u0000\u0094\u038a\u0001\u0000\u0000\u0000\u0096"+ + "\u038d\u0001\u0000\u0000\u0000\u0098\u0390\u0001\u0000\u0000\u0000\u009a"+ + "\u0393\u0001\u0000\u0000\u0000\u009c\u0395\u0001\u0000\u0000\u0000\u009e"+ + "\u0398\u0001\u0000\u0000\u0000\u00a0\u039a\u0001\u0000\u0000\u0000\u00a2"+ + "\u039d\u0001\u0000\u0000\u0000\u00a4\u039f\u0001\u0000\u0000\u0000\u00a6"+ + "\u03a1\u0001\u0000\u0000\u0000\u00a8\u03a3\u0001\u0000\u0000\u0000\u00aa"+ + "\u03a5\u0001\u0000\u0000\u0000\u00ac\u03a7\u0001\u0000\u0000\u0000\u00ae"+ + "\u03bc\u0001\u0000\u0000\u0000\u00b0\u03be\u0001\u0000\u0000\u0000\u00b2"+ + "\u03c3\u0001\u0000\u0000\u0000\u00b4\u03d8\u0001\u0000\u0000\u0000\u00b6"+ + "\u03da\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ + "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03e8\u0001\u0000\u0000\u0000\u00be"+ + "\u03ec\u0001\u0000\u0000\u0000\u00c0\u03f0\u0001\u0000\u0000\u0000\u00c2"+ + "\u03f5\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ + "\u03fe\u0001\u0000\u0000\u0000\u00c8\u0402\u0001\u0000\u0000\u0000\u00ca"+ + "\u0406\u0001\u0000\u0000\u0000\u00cc\u040b\u0001\u0000\u0000\u0000\u00ce"+ + "\u040f\u0001\u0000\u0000\u0000\u00d0\u0413\u0001\u0000\u0000\u0000\u00d2"+ + "\u0417\u0001\u0000\u0000\u0000\u00d4\u041b\u0001\u0000\u0000\u0000\u00d6"+ + "\u041f\u0001\u0000\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da"+ + "\u042e\u0001\u0000\u0000\u0000\u00dc\u0432\u0001\u0000\u0000\u0000\u00de"+ + "\u0436\u0001\u0000\u0000\u0000\u00e0\u043a\u0001\u0000\u0000\u0000\u00e2"+ + "\u043e\u0001\u0000\u0000\u0000\u00e4\u0442\u0001\u0000\u0000\u0000\u00e6"+ + "\u0446\u0001\u0000\u0000\u0000\u00e8\u044b\u0001\u0000\u0000\u0000\u00ea"+ + "\u044f\u0001\u0000\u0000\u0000\u00ec\u0453\u0001\u0000\u0000\u0000\u00ee"+ + "\u0458\u0001\u0000\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2"+ + 
"\u0476\u0001\u0000\u0000\u0000\u00f4\u047a\u0001\u0000\u0000\u0000\u00f6"+ + "\u047e\u0001\u0000\u0000\u0000\u00f8\u0482\u0001\u0000\u0000\u0000\u00fa"+ + "\u0486\u0001\u0000\u0000\u0000\u00fc\u048a\u0001\u0000\u0000\u0000\u00fe"+ + "\u048f\u0001\u0000\u0000\u0000\u0100\u0493\u0001\u0000\u0000\u0000\u0102"+ + "\u0497\u0001\u0000\u0000\u0000\u0104\u049b\u0001\u0000\u0000\u0000\u0106"+ + "\u04a0\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ + "\u04a8\u0001\u0000\u0000\u0000\u010c\u04ac\u0001\u0000\u0000\u0000\u010e"+ + "\u04b0\u0001\u0000\u0000\u0000\u0110\u04b4\u0001\u0000\u0000\u0000\u0112"+ + "\u04b8\u0001\u0000\u0000\u0000\u0114\u04bd\u0001\u0000\u0000\u0000\u0116"+ + "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ + "\u04ce\u0001\u0000\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e"+ + "\u04de\u0001\u0000\u0000\u0000\u0120\u04e2\u0001\u0000\u0000\u0000\u0122"+ + "\u04e6\u0001\u0000\u0000\u0000\u0124\u04ea\u0001\u0000\u0000\u0000\u0126"+ + "\u04ee\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ + "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fc\u0001\u0000\u0000\u0000\u012e"+ + "\u0500\u0001\u0000\u0000\u0000\u0130\u0504\u0001\u0000\u0000\u0000\u0132"+ + "\u0508\u0001\u0000\u0000\u0000\u0134\u050c\u0001\u0000\u0000\u0000\u0136"+ + "\u0511\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ + "\u051a\u0001\u0000\u0000\u0000\u013c\u051e\u0001\u0000\u0000\u0000\u013e"+ + "\u0522\u0001\u0000\u0000\u0000\u0140\u0527\u0001\u0000\u0000\u0000\u0142"+ + "\u052b\u0001\u0000\u0000\u0000\u0144\u0530\u0001\u0000\u0000\u0000\u0146"+ + "\u0535\u0001\u0000\u0000\u0000\u0148\u0539\u0001\u0000\u0000\u0000\u014a"+ + "\u053d\u0001\u0000\u0000\u0000\u014c\u0541\u0001\u0000\u0000\u0000\u014e"+ + "\u0545\u0001\u0000\u0000\u0000\u0150\u0549\u0001\u0000\u0000\u0000\u0152"+ + "\u054e\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ + "\u0557\u0001\u0000\u0000\u0000\u0158\u055b\u0001\u0000\u0000\u0000\u015a"+ + "\u055f\u0001\u0000\u0000\u0000\u015c\u0564\u0001\u0000\u0000\u0000\u015e"+ + "\u056d\u0001\u0000\u0000\u0000\u0160\u0571\u0001\u0000\u0000\u0000\u0162"+ + "\u0575\u0001\u0000\u0000\u0000\u0164\u0579\u0001\u0000\u0000\u0000\u0166"+ + "\u057d\u0001\u0000\u0000\u0000\u0168\u0582\u0001\u0000\u0000\u0000\u016a"+ + "\u0586\u0001\u0000\u0000\u0000\u016c\u058a\u0001\u0000\u0000\u0000\u016e"+ + "\u058e\u0001\u0000\u0000\u0000\u0170\u0593\u0001\u0000\u0000\u0000\u0172"+ + "\u0597\u0001\u0000\u0000\u0000\u0174\u059b\u0001\u0000\u0000\u0000\u0176"+ + "\u059f\u0001\u0000\u0000\u0000\u0178\u05a3\u0001\u0000\u0000\u0000\u017a"+ + "\u05a7\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ + "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b5\u0001\u0000\u0000\u0000\u0182"+ + "\u05b9\u0001\u0000\u0000\u0000\u0184\u05bd\u0001\u0000\u0000\u0000\u0186"+ + "\u05c1\u0001\u0000\u0000\u0000\u0188\u05c5\u0001\u0000\u0000\u0000\u018a"+ + "\u05ca\u0001\u0000\u0000\u0000\u018c\u05ce\u0001\u0000\u0000\u0000\u018e"+ + "\u05d2\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ + "\u05e1\u0001\u0000\u0000\u0000\u0194\u05e5\u0001\u0000\u0000\u0000\u0196"+ + "\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001\u0000\u0000\u0000\u019a"+ + "\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001\u0000\u0000\u0000\u019e"+ + "\u05fa\u0001\u0000\u0000\u0000\u01a0\u0600\u0001\u0000\u0000\u0000\u01a2"+ + "\u0606\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ + 
"\u060e\u0001\u0000\u0000\u0000\u01a8\u0612\u0001\u0000\u0000\u0000\u01aa"+ + "\u0618\u0001\u0000\u0000\u0000\u01ac\u061e\u0001\u0000\u0000\u0000\u01ae"+ + "\u0622\u0001\u0000\u0000\u0000\u01b0\u0626\u0001\u0000\u0000\u0000\u01b2"+ + "\u062a\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ + "\u0636\u0001\u0000\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba"+ + "\u01bb\u0007\u0000\u0000\u0000\u01bb\u01bc\u0007\u0001\u0000\u0000\u01bc"+ + "\u01bd\u0007\u0002\u0000\u0000\u01bd\u01be\u0007\u0002\u0000\u0000\u01be"+ + "\u01bf\u0007\u0003\u0000\u0000\u01bf\u01c0\u0007\u0004\u0000\u0000\u01c0"+ + "\u01c1\u0007\u0005\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c3\u0006\u0000\u0000\u0000\u01c3\u0011\u0001\u0000\u0000\u0000\u01c4"+ + "\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0006\u0000\u0000\u01c6"+ + "\u01c7\u0007\u0007\u0000\u0000\u01c7\u01c8\u0007\b\u0000\u0000\u01c8\u01c9"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006\u0001\u0001\u0000\u01ca\u0013"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007\u0003\u0000\u0000\u01cc\u01cd"+ + "\u0007\t\u0000\u0000\u01cd\u01ce\u0007\u0006\u0000\u0000\u01ce\u01cf\u0007"+ + "\u0001\u0000\u0000\u01cf\u01d0\u0007\u0004\u0000\u0000\u01d0\u01d1\u0007"+ + "\n\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0002"+ + "\u0002\u0000\u01d3\u0015\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0003"+ + "\u0000\u0000\u01d5\u01d6\u0007\u000b\u0000\u0000\u01d6\u01d7\u0007\f\u0000"+ + "\u0000\u01d7\u01d8\u0007\r\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ + "\u01d9\u01da\u0006\u0003\u0000\u0000\u01da\u0017\u0001\u0000\u0000\u0000"+ + "\u01db\u01dc\u0007\u0003\u0000\u0000\u01dc\u01dd\u0007\u000e\u0000\u0000"+ + "\u01dd\u01de\u0007\b\u0000\u0000\u01de\u01df\u0007\r\u0000\u0000\u01df"+ + "\u01e0\u0007\f\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2"+ + "\u0007\t\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ + "\u0004\u0003\u0000\u01e4\u0019\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007"+ + "\u000f\u0000\u0000\u01e6\u01e7\u0007\u0006\u0000\u0000\u01e7\u01e8\u0007"+ + "\u0007\u0000\u0000\u01e8\u01e9\u0007\u0010\u0000\u0000\u01e9\u01ea\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0006\u0005\u0004\u0000\u01eb\u001b\u0001"+ + "\u0000\u0000\u0000\u01ec\u01ed\u0007\u0011\u0000\u0000\u01ed\u01ee\u0007"+ + "\u0006\u0000\u0000\u01ee\u01ef\u0007\u0007\u0000\u0000\u01ef\u01f0\u0007"+ + "\u0012\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006"+ + "\u0006\u0000\u0000\u01f2\u001d\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007"+ + "\u0012\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000\u01f5\u01f6\u0007"+ + "\u0003\u0000\u0000\u01f6\u01f7\u0007\b\u0000\u0000\u01f7\u01f8\u0001\u0000"+ + "\u0000\u0000\u01f8\u01f9\u0006\u0007\u0001\u0000\u01f9\u001f\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0007\r\u0000\u0000\u01fb\u01fc\u0007\u0001\u0000"+ + "\u0000\u01fc\u01fd\u0007\u0010\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000"+ + "\u0000\u01fe\u01ff\u0007\u0005\u0000\u0000\u01ff\u0200\u0001\u0000\u0000"+ + "\u0000\u0200\u0201\u0006\b\u0000\u0000\u0201!\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0007\u0010\u0000\u0000\u0203\u0204\u0007\u000b\u0000\u0000"+ + "\u0204\u0205\u0005_\u0000\u0000\u0205\u0206\u0007\u0003\u0000\u0000\u0206"+ + "\u0207\u0007\u000e\u0000\u0000\u0207\u0208\u0007\b\u0000\u0000\u0208\u0209"+ + "\u0007\f\u0000\u0000\u0209\u020a\u0007\t\u0000\u0000\u020a\u020b\u0007"+ + "\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0006"+ + 
"\t\u0005\u0000\u020d#\u0001\u0000\u0000\u0000\u020e\u020f\u0007\u0006"+ + "\u0000\u0000\u020f\u0210\u0007\u0003\u0000\u0000\u0210\u0211\u0007\t\u0000"+ + "\u0000\u0211\u0212\u0007\f\u0000\u0000\u0212\u0213\u0007\u0010\u0000\u0000"+ + "\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000"+ + "\u0215\u0216\u0006\n\u0006\u0000\u0216%\u0001\u0000\u0000\u0000\u0217"+ + "\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0007\u0000\u0000\u0219"+ + "\u021a\u0007\u0013\u0000\u0000\u021a\u021b\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0006\u000b\u0000\u0000\u021c\'\u0001\u0000\u0000\u0000\u021d\u021e"+ + "\u0007\u0002\u0000\u0000\u021e\u021f\u0007\n\u0000\u0000\u021f\u0220\u0007"+ + "\u0007\u0000\u0000\u0220\u0221\u0007\u0013\u0000\u0000\u0221\u0222\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0006\f\u0007\u0000\u0223)\u0001\u0000"+ + "\u0000\u0000\u0224\u0225\u0007\u0002\u0000\u0000\u0225\u0226\u0007\u0007"+ + "\u0000\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227\u0228\u0007\u0005"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006\r\u0000"+ + "\u0000\u022a+\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0002\u0000\u0000"+ + "\u022c\u022d\u0007\u0005\u0000\u0000\u022d\u022e\u0007\f\u0000\u0000\u022e"+ + "\u022f\u0007\u0005\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000\u0230"+ + "\u0231\u0001\u0000\u0000\u0000\u0231\u0232\u0006\u000e\u0000\u0000\u0232"+ + "-\u0001\u0000\u0000\u0000\u0233\u0234\u0007\u0013\u0000\u0000\u0234\u0235"+ + "\u0007\n\u0000\u0000\u0235\u0236\u0007\u0003\u0000\u0000\u0236\u0237\u0007"+ + "\u0006\u0000\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0006\u000f\u0000\u0000\u023a/\u0001\u0000"+ + "\u0000\u0000\u023b\u023c\u0004\u0010\u0000\u0000\u023c\u023d\u0007\u0001"+ + "\u0000\u0000\u023d\u023e\u0007\t\u0000\u0000\u023e\u023f\u0007\r\u0000"+ + "\u0000\u023f\u0240\u0007\u0001\u0000\u0000\u0240\u0241\u0007\t\u0000\u0000"+ + "\u0241\u0242\u0007\u0003\u0000\u0000\u0242\u0243\u0007\u0002\u0000\u0000"+ + "\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245\u0007\f\u0000\u0000\u0245"+ + "\u0246\u0007\u0005\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000\u0247"+ + "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\u0010\u0000\u0000\u0249"+ + "1\u0001\u0000\u0000\u0000\u024a\u024b\u0004\u0011\u0001\u0000\u024b\u024c"+ + "\u0007\r\u0000\u0000\u024c\u024d\u0007\u0007\u0000\u0000\u024d\u024e\u0007"+ + "\u0007\u0000\u0000\u024e\u024f\u0007\u0012\u0000\u0000\u024f\u0250\u0007"+ + "\u0014\u0000\u0000\u0250\u0251\u0007\b\u0000\u0000\u0251\u0252\u0005_"+ + "\u0000\u0000\u0252\u0253\u0005\u8001\uf414\u0000\u0000\u0253\u0254\u0001"+ + "\u0000\u0000\u0000\u0254\u0255\u0006\u0011\b\u0000\u02553\u0001\u0000"+ + "\u0000\u0000\u0256\u0257\u0004\u0012\u0002\u0000\u0257\u0258\u0007\u0010"+ + "\u0000\u0000\u0258\u0259\u0007\u0003\u0000\u0000\u0259\u025a\u0007\u0005"+ + "\u0000\u0000\u025a\u025b\u0007\u0006\u0000\u0000\u025b\u025c\u0007\u0001"+ + "\u0000\u0000\u025c\u025d\u0007\u0004\u0000\u0000\u025d\u025e\u0007\u0002"+ + "\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0012"+ + "\t\u0000\u02605\u0001\u0000\u0000\u0000\u0261\u0262\u0004\u0013\u0003"+ + "\u0000\u0262\u0263\u0007\u0015\u0000\u0000\u0263\u0264\u0007\u0007\u0000"+ + "\u0000\u0264\u0265\u0007\u0001\u0000\u0000\u0265\u0266\u0007\t\u0000\u0000"+ + "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0013\n\u0000\u0268"+ + "7\u0001\u0000\u0000\u0000\u0269\u026a\u0004\u0014\u0004\u0000\u026a\u026b"+ + 
"\u0007\u000f\u0000\u0000\u026b\u026c\u0007\u0014\u0000\u0000\u026c\u026d"+ + "\u0007\r\u0000\u0000\u026d\u026e\u0007\r\u0000\u0000\u026e\u026f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0006\u0014\n\u0000\u02709\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0004\u0015\u0005\u0000\u0272\u0273\u0007\r\u0000"+ + "\u0000\u0273\u0274\u0007\u0003\u0000\u0000\u0274\u0275\u0007\u000f\u0000"+ + "\u0000\u0275\u0276\u0007\u0005\u0000\u0000\u0276\u0277\u0001\u0000\u0000"+ + "\u0000\u0277\u0278\u0006\u0015\n\u0000\u0278;\u0001\u0000\u0000\u0000"+ + "\u0279\u027a\u0004\u0016\u0006\u0000\u027a\u027b\u0007\u0006\u0000\u0000"+ + "\u027b\u027c\u0007\u0001\u0000\u0000\u027c\u027d\u0007\u0011\u0000\u0000"+ + "\u027d\u027e\u0007\n\u0000\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f"+ + "\u0280\u0001\u0000\u0000\u0000\u0280\u0281\u0006\u0016\n\u0000\u0281="+ + "\u0001\u0000\u0000\u0000\u0282\u0283\u0004\u0017\u0007\u0000\u0283\u0284"+ + "\u0007\r\u0000\u0000\u0284\u0285\u0007\u0007\u0000\u0000\u0285\u0286\u0007"+ + "\u0007\u0000\u0000\u0286\u0287\u0007\u0012\u0000\u0000\u0287\u0288\u0007"+ + "\u0014\u0000\u0000\u0288\u0289\u0007\b\u0000\u0000\u0289\u028a\u0001\u0000"+ + "\u0000\u0000\u028a\u028b\u0006\u0017\n\u0000\u028b?\u0001\u0000\u0000"+ + "\u0000\u028c\u028e\b\u0016\u0000\u0000\u028d\u028c\u0001\u0000\u0000\u0000"+ + "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000"+ + "\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ + "\u0291\u0292\u0006\u0018\u0000\u0000\u0292A\u0001\u0000\u0000\u0000\u0293"+ + "\u0294\u0005/\u0000\u0000\u0294\u0295\u0005/\u0000\u0000\u0295\u0299\u0001"+ + "\u0000\u0000\u0000\u0296\u0298\b\u0017\u0000\u0000\u0297\u0296\u0001\u0000"+ + "\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000"+ + "\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029d\u0001\u0000"+ + "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u029e\u0005\r\u0000"+ + "\u0000\u029d\u029c\u0001\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000"+ + "\u0000\u029e\u02a0\u0001\u0000\u0000\u0000\u029f\u02a1\u0005\n\u0000\u0000"+ + "\u02a0\u029f\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000"+ + "\u02a1\u02a2\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006\u0019\u000b\u0000"+ + "\u02a3C\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005/\u0000\u0000\u02a5\u02a6"+ + "\u0005*\u0000\u0000\u02a6\u02ab\u0001\u0000\u0000\u0000\u02a7\u02aa\u0003"+ + "D\u001a\u0000\u02a8\u02aa\t\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000"+ + "\u0000\u0000\u02a9\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ad\u0001\u0000"+ + "\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000"+ + "\u0000\u0000\u02ac\u02ae\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ + "\u0000\u0000\u02ae\u02af\u0005*\u0000\u0000\u02af\u02b0\u0005/\u0000\u0000"+ + "\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b2\u0006\u001a\u000b\u0000"+ + "\u02b2E\u0001\u0000\u0000\u0000\u02b3\u02b5\u0007\u0018\u0000\u0000\u02b4"+ + "\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6"+ + "\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7"+ + "\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b9\u0006\u001b\u000b\u0000\u02b9"+ + "G\u0001\u0000\u0000\u0000\u02ba\u02bb\u0005|\u0000\u0000\u02bb\u02bc\u0001"+ + "\u0000\u0000\u0000\u02bc\u02bd\u0006\u001c\f\u0000\u02bdI\u0001\u0000"+ + "\u0000\u0000\u02be\u02bf\u0007\u0019\u0000\u0000\u02bfK\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0007\u001a\u0000\u0000\u02c1M\u0001\u0000\u0000\u0000"+ + 
"\u02c2\u02c3\u0005\\\u0000\u0000\u02c3\u02c4\u0007\u001b\u0000\u0000\u02c4"+ + "O\u0001\u0000\u0000\u0000\u02c5\u02c6\b\u001c\u0000\u0000\u02c6Q\u0001"+ + "\u0000\u0000\u0000\u02c7\u02c9\u0007\u0003\u0000\u0000\u02c8\u02ca\u0007"+ + "\u001d\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0003"+ + "J\u001d\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000"+ + "\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000"+ + "\u0000\u0000\u02cfS\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005@\u0000\u0000"+ + "\u02d1U\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005`\u0000\u0000\u02d3W"+ + "\u0001\u0000\u0000\u0000\u02d4\u02d8\b\u001e\u0000\u0000\u02d5\u02d6\u0005"+ + "`\u0000\u0000\u02d6\u02d8\u0005`\u0000\u0000\u02d7\u02d4\u0001\u0000\u0000"+ + "\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d8Y\u0001\u0000\u0000\u0000"+ + "\u02d9\u02da\u0005_\u0000\u0000\u02da[\u0001\u0000\u0000\u0000\u02db\u02df"+ + "\u0003L\u001e\u0000\u02dc\u02df\u0003J\u001d\u0000\u02dd\u02df\u0003Z"+ + "%\u0000\u02de\u02db\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000"+ + "\u0000\u02de\u02dd\u0001\u0000\u0000\u0000\u02df]\u0001\u0000\u0000\u0000"+ + "\u02e0\u02e5\u0005\"\u0000\u0000\u02e1\u02e4\u0003N\u001f\u0000\u02e2"+ + "\u02e4\u0003P \u0000\u02e3\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e2\u0001"+ + "\u0000\u0000\u0000\u02e4\u02e7\u0001\u0000\u0000\u0000\u02e5\u02e3\u0001"+ + "\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000\u0000\u02e6\u02e8\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e8\u02fe\u0005"+ + "\"\u0000\u0000\u02e9\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000"+ + "\u0000\u02eb\u02ec\u0005\"\u0000\u0000\u02ec\u02f0\u0001\u0000\u0000\u0000"+ + "\u02ed\u02ef\b\u0017\u0000\u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef"+ + "\u02f2\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f0"+ + "\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f3\u0001\u0000\u0000\u0000\u02f2"+ + "\u02f0\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5"+ + "\u0005\"\u0000\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02f8\u0001"+ + "\u0000\u0000\u0000\u02f7\u02f9\u0005\"\u0000\u0000\u02f8\u02f7\u0001\u0000"+ + "\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fa\u02fc\u0005\"\u0000\u0000\u02fb\u02fa\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001\u0000\u0000"+ + "\u0000\u02fd\u02e0\u0001\u0000\u0000\u0000\u02fd\u02e9\u0001\u0000\u0000"+ + "\u0000\u02fe_\u0001\u0000\u0000\u0000\u02ff\u0301\u0003J\u001d\u0000\u0300"+ + "\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ + "\u0300\u0001\u0000\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303"+ + "a\u0001\u0000\u0000\u0000\u0304\u0306\u0003J\u001d\u0000\u0305\u0304\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+ + "\u0000\u0000\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u0309\u0001"+ + "\u0000\u0000\u0000\u0309\u030d\u0003t2\u0000\u030a\u030c\u0003J\u001d"+ + "\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000"+ + "\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000"+ + "\u0000\u030e\u032f\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000"+ + "\u0000\u0310\u0312\u0003t2\u0000\u0311\u0313\u0003J\u001d\u0000\u0312"+ + "\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314"+ + 
"\u0312\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000\u0315"+ + "\u032f\u0001\u0000\u0000\u0000\u0316\u0318\u0003J\u001d\u0000\u0317\u0316"+ + "\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319\u0317"+ + "\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u0322"+ + "\u0001\u0000\u0000\u0000\u031b\u031f\u0003t2\u0000\u031c\u031e\u0003J"+ + "\u001d\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u0321\u0001\u0000"+ + "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ + "\u0000\u0000\u0320\u0323\u0001\u0000\u0000\u0000\u0321\u031f\u0001\u0000"+ + "\u0000\u0000\u0322\u031b\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000"+ + "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0325\u0003R!\u0000"+ + "\u0325\u032f\u0001\u0000\u0000\u0000\u0326\u0328\u0003t2\u0000\u0327\u0329"+ + "\u0003J\u001d\u0000\u0328\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001"+ + "\u0000\u0000\u0000\u032a\u0328\u0001\u0000\u0000\u0000\u032a\u032b\u0001"+ + "\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "R!\u0000\u032d\u032f\u0001\u0000\u0000\u0000\u032e\u0305\u0001\u0000\u0000"+ + "\u0000\u032e\u0310\u0001\u0000\u0000\u0000\u032e\u0317\u0001\u0000\u0000"+ + "\u0000\u032e\u0326\u0001\u0000\u0000\u0000\u032fc\u0001\u0000\u0000\u0000"+ + "\u0330\u0331\u0007\u001f\u0000\u0000\u0331\u0332\u0007 \u0000\u0000\u0332"+ + "e\u0001\u0000\u0000\u0000\u0333\u0334\u0007\f\u0000\u0000\u0334\u0335"+ + "\u0007\t\u0000\u0000\u0335\u0336\u0007\u0000\u0000\u0000\u0336g\u0001"+ + "\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\u0002"+ + "\u0000\u0000\u0339\u033a\u0007\u0004\u0000\u0000\u033ai\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005=\u0000\u0000\u033ck\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0005:\u0000\u0000\u033e\u033f\u0005:\u0000\u0000\u033fm\u0001"+ + "\u0000\u0000\u0000\u0340\u0341\u0005:\u0000\u0000\u0341o\u0001\u0000\u0000"+ + "\u0000\u0342\u0343\u0005,\u0000\u0000\u0343q\u0001\u0000\u0000\u0000\u0344"+ + "\u0345\u0007\u0000\u0000\u0000\u0345\u0346\u0007\u0003\u0000\u0000\u0346"+ + "\u0347\u0007\u0002\u0000\u0000\u0347\u0348\u0007\u0004\u0000\u0000\u0348"+ + "s\u0001\u0000\u0000\u0000\u0349\u034a\u0005.\u0000\u0000\u034au\u0001"+ + "\u0000\u0000\u0000\u034b\u034c\u0007\u000f\u0000\u0000\u034c\u034d\u0007"+ + "\f\u0000\u0000\u034d\u034e\u0007\r\u0000\u0000\u034e\u034f\u0007\u0002"+ + "\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350w\u0001\u0000\u0000"+ + "\u0000\u0351\u0352\u0007\u000f\u0000\u0000\u0352\u0353\u0007\u0001\u0000"+ + "\u0000\u0353\u0354\u0007\u0006\u0000\u0000\u0354\u0355\u0007\u0002\u0000"+ + "\u0000\u0355\u0356\u0007\u0005\u0000\u0000\u0356y\u0001\u0000\u0000\u0000"+ + "\u0357\u0358\u0007\u0001\u0000\u0000\u0358\u0359\u0007\t\u0000\u0000\u0359"+ + "{\u0001\u0000\u0000\u0000\u035a\u035b\u0007\u0001\u0000\u0000\u035b\u035c"+ + "\u0007\u0002\u0000\u0000\u035c}\u0001\u0000\u0000\u0000\u035d\u035e\u0007"+ + "\r\u0000\u0000\u035e\u035f\u0007\f\u0000\u0000\u035f\u0360\u0007\u0002"+ + "\u0000\u0000\u0360\u0361\u0007\u0005\u0000\u0000\u0361\u007f\u0001\u0000"+ + "\u0000\u0000\u0362\u0363\u0007\r\u0000\u0000\u0363\u0364\u0007\u0001\u0000"+ + "\u0000\u0364\u0365\u0007\u0012\u0000\u0000\u0365\u0366\u0007\u0003\u0000"+ + "\u0000\u0366\u0081\u0001\u0000\u0000\u0000\u0367\u0368\u0005(\u0000\u0000"+ + "\u0368\u0083\u0001\u0000\u0000\u0000\u0369\u036a\u0007\t\u0000\u0000\u036a"+ + "\u036b\u0007\u0007\u0000\u0000\u036b\u036c\u0007\u0005\u0000\u0000\u036c"+ + 
"\u0085\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e\u036f"+ + "\u0007\u0014\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0371\u0007"+ + "\r\u0000\u0000\u0371\u0087\u0001\u0000\u0000\u0000\u0372\u0373\u0007\t"+ + "\u0000\u0000\u0373\u0374\u0007\u0014\u0000\u0000\u0374\u0375\u0007\r\u0000"+ + "\u0000\u0375\u0376\u0007\r\u0000\u0000\u0376\u0377\u0007\u0002\u0000\u0000"+ + "\u0377\u0089\u0001\u0000\u0000\u0000\u0378\u0379\u0007\u0007\u0000\u0000"+ + "\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u008b\u0001\u0000\u0000\u0000"+ + "\u037b\u037c\u0005?\u0000\u0000\u037c\u008d\u0001\u0000\u0000\u0000\u037d"+ + "\u037e\u0007\u0006\u0000\u0000\u037e\u037f\u0007\r\u0000\u0000\u037f\u0380"+ + "\u0007\u0001\u0000\u0000\u0380\u0381\u0007\u0012\u0000\u0000\u0381\u0382"+ + "\u0007\u0003\u0000\u0000\u0382\u008f\u0001\u0000\u0000\u0000\u0383\u0384"+ + "\u0005)\u0000\u0000\u0384\u0091\u0001\u0000\u0000\u0000\u0385\u0386\u0007"+ + "\u0005\u0000\u0000\u0386\u0387\u0007\u0006\u0000\u0000\u0387\u0388\u0007"+ + "\u0014\u0000\u0000\u0388\u0389\u0007\u0003\u0000\u0000\u0389\u0093\u0001"+ + "\u0000\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u038c\u0005=\u0000"+ + "\u0000\u038c\u0095\u0001\u0000\u0000\u0000\u038d\u038e\u0005=\u0000\u0000"+ + "\u038e\u038f\u0005~\u0000\u0000\u038f\u0097\u0001\u0000\u0000\u0000\u0390"+ + "\u0391\u0005!\u0000\u0000\u0391\u0392\u0005=\u0000\u0000\u0392\u0099\u0001"+ + "\u0000\u0000\u0000\u0393\u0394\u0005<\u0000\u0000\u0394\u009b\u0001\u0000"+ + "\u0000\u0000\u0395\u0396\u0005<\u0000\u0000\u0396\u0397\u0005=\u0000\u0000"+ + "\u0397\u009d\u0001\u0000\u0000\u0000\u0398\u0399\u0005>\u0000\u0000\u0399"+ + "\u009f\u0001\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u039c"+ + "\u0005=\u0000\u0000\u039c\u00a1\u0001\u0000\u0000\u0000\u039d\u039e\u0005"+ + "+\u0000\u0000\u039e\u00a3\u0001\u0000\u0000\u0000\u039f\u03a0\u0005-\u0000"+ + "\u0000\u03a0\u00a5\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005*\u0000\u0000"+ + "\u03a2\u00a7\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005/\u0000\u0000\u03a4"+ + "\u00a9\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005%\u0000\u0000\u03a6\u00ab"+ + "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003.\u000f\u0000\u03a8\u03a9\u0001"+ + "\u0000\u0000\u0000\u03a9\u03aa\u0006N\r\u0000\u03aa\u00ad\u0001\u0000"+ + "\u0000\u0000\u03ab\u03ae\u0003\u008c>\u0000\u03ac\u03af\u0003L\u001e\u0000"+ + "\u03ad\u03af\u0003Z%\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03ad"+ + "\u0001\u0000\u0000\u0000\u03af\u03b3\u0001\u0000\u0000\u0000\u03b0\u03b2"+ + "\u0003\\&\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b5\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b1\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001"+ + "\u0000\u0000\u0000\u03b4\u03bd\u0001\u0000\u0000\u0000\u03b5\u03b3\u0001"+ + "\u0000\u0000\u0000\u03b6\u03b8\u0003\u008c>\u0000\u03b7\u03b9\u0003J\u001d"+ + "\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03ba\u0001\u0000\u0000"+ + "\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bd\u0001\u0000\u0000\u0000\u03bc\u03ab\u0001\u0000\u0000"+ + "\u0000\u03bc\u03b6\u0001\u0000\u0000\u0000\u03bd\u00af\u0001\u0000\u0000"+ + "\u0000\u03be\u03bf\u0005[\u0000\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0006P\u0000\u0000\u03c1\u03c2\u0006P\u0000\u0000\u03c2\u00b1"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0005]\u0000\u0000\u03c4\u03c5\u0001"+ + "\u0000\u0000\u0000\u03c5\u03c6\u0006Q\f\u0000\u03c6\u03c7\u0006Q\f\u0000"+ + "\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03cc\u0003L\u001e\u0000\u03c9"+ + 
"\u03cb\u0003\\&\u0000\u03ca\u03c9\u0001\u0000\u0000\u0000\u03cb\u03ce"+ + "\u0001\u0000\u0000\u0000\u03cc\u03ca\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + "\u0001\u0000\u0000\u0000\u03cd\u03d9\u0001\u0000\u0000\u0000\u03ce\u03cc"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d2\u0003Z%\u0000\u03d0\u03d2\u0003T"+ + "\"\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d1\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d2\u03d4\u0001\u0000\u0000\u0000\u03d3\u03d5\u0003\\&\u0000"+ + "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d7\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d1\u0001\u0000\u0000\u0000\u03d9\u00b5\u0001\u0000\u0000\u0000"+ + "\u03da\u03dc\u0003V#\u0000\u03db\u03dd\u0003X$\u0000\u03dc\u03db\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03dc\u0001"+ + "\u0000\u0000\u0000\u03de\u03df\u0001\u0000\u0000\u0000\u03df\u03e0\u0001"+ + "\u0000\u0000\u0000\u03e0\u03e1\u0003V#\u0000\u03e1\u00b7\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0003\u00b6S\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000"+ + "\u03e4\u03e5\u0003B\u0019\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e7\u0006U\u000b\u0000\u03e7\u00bb\u0001\u0000\u0000\u0000\u03e8\u03e9"+ + "\u0003D\u001a\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006"+ + "V\u000b\u0000\u03eb\u00bd\u0001\u0000\u0000\u0000\u03ec\u03ed\u0003F\u001b"+ + "\u0000\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006W\u000b\u0000"+ + "\u03ef\u00bf\u0001\u0000\u0000\u0000\u03f0\u03f1\u0003\u00b0P\u0000\u03f1"+ + "\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006X\u000e\u0000\u03f3\u03f4"+ + "\u0006X\u000f\u0000\u03f4\u00c1\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ + "H\u001c\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006Y\u0010"+ + "\u0000\u03f8\u03f9\u0006Y\f\u0000\u03f9\u00c3\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u0003F\u001b\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0006Z\u000b\u0000\u03fd\u00c5\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0003B\u0019\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ + "[\u000b\u0000\u0401\u00c7\u0001\u0000\u0000\u0000\u0402\u0403\u0003D\u001a"+ + "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006\\\u000b\u0000"+ + "\u0405\u00c9\u0001\u0000\u0000\u0000\u0406\u0407\u0003H\u001c\u0000\u0407"+ + "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006]\u0010\u0000\u0409\u040a"+ + "\u0006]\f\u0000\u040a\u00cb\u0001\u0000\u0000\u0000\u040b\u040c\u0003"+ + "\u00b0P\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006^\u000e"+ + "\u0000\u040e\u00cd\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00b2Q\u0000"+ + "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006_\u0011\u0000\u0412"+ + "\u00cf\u0001\u0000\u0000\u0000\u0413\u0414\u0003n/\u0000\u0414\u0415\u0001"+ + "\u0000\u0000\u0000\u0415\u0416\u0006`\u0012\u0000\u0416\u00d1\u0001\u0000"+ + "\u0000\u0000\u0417\u0418\u0003p0\u0000\u0418\u0419\u0001\u0000\u0000\u0000"+ + "\u0419\u041a\u0006a\u0013\u0000\u041a\u00d3\u0001\u0000\u0000\u0000\u041b"+ + "\u041c\u0003j-\u0000\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006"+ + "b\u0014\u0000\u041e\u00d5\u0001\u0000\u0000\u0000\u041f\u0420\u0007\u0010"+ + "\u0000\u0000\u0420\u0421\u0007\u0003\u0000\u0000\u0421\u0422\u0007\u0005"+ + "\u0000\u0000\u0422\u0423\u0007\f\u0000\u0000\u0423\u0424\u0007\u0000\u0000"+ + "\u0000\u0424\u0425\u0007\f\u0000\u0000\u0425\u0426\u0007\u0005\u0000\u0000"+ + 
"\u0426\u0427\u0007\f\u0000\u0000\u0427\u00d7\u0001\u0000\u0000\u0000\u0428"+ + "\u042c\b!\u0000\u0000\u0429\u042a\u0005/\u0000\u0000\u042a\u042c\b\"\u0000"+ + "\u0000\u042b\u0428\u0001\u0000\u0000\u0000\u042b\u0429\u0001\u0000\u0000"+ + "\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042f\u0003\u00d8d\u0000"+ + "\u042e\u042d\u0001\u0000\u0000\u0000\u042f\u0430\u0001\u0000\u0000\u0000"+ + "\u0430\u042e\u0001\u0000\u0000\u0000\u0430\u0431\u0001\u0000\u0000\u0000"+ + "\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0433\u0003\u00dae\u0000\u0433"+ + "\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006f\u0015\u0000\u0435\u00dd"+ + "\u0001\u0000\u0000\u0000\u0436\u0437\u0003^\'\u0000\u0437\u0438\u0001"+ + "\u0000\u0000\u0000\u0438\u0439\u0006g\u0016\u0000\u0439\u00df\u0001\u0000"+ + "\u0000\u0000\u043a\u043b\u0003B\u0019\u0000\u043b\u043c\u0001\u0000\u0000"+ + "\u0000\u043c\u043d\u0006h\u000b\u0000\u043d\u00e1\u0001\u0000\u0000\u0000"+ + "\u043e\u043f\u0003D\u001a\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ + "\u0441\u0006i\u000b\u0000\u0441\u00e3\u0001\u0000\u0000\u0000\u0442\u0443"+ + "\u0003F\u001b\u0000\u0443\u0444\u0001\u0000\u0000\u0000\u0444\u0445\u0006"+ + "j\u000b\u0000\u0445\u00e5\u0001\u0000\u0000\u0000\u0446\u0447\u0003H\u001c"+ + "\u0000\u0447\u0448\u0001\u0000\u0000\u0000\u0448\u0449\u0006k\u0010\u0000"+ + "\u0449\u044a\u0006k\f\u0000\u044a\u00e7\u0001\u0000\u0000\u0000\u044b"+ + "\u044c\u0003t2\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006"+ + "l\u0017\u0000\u044e\u00e9\u0001\u0000\u0000\u0000\u044f\u0450\u0003p0"+ + "\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006m\u0013\u0000"+ + "\u0452\u00eb\u0001\u0000\u0000\u0000\u0453\u0454\u0004n\b\u0000\u0454"+ + "\u0455\u0003\u008c>\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457"+ + "\u0006n\u0018\u0000\u0457\u00ed\u0001\u0000\u0000\u0000\u0458\u0459\u0004"+ + "o\t\u0000\u0459\u045a\u0003\u00aeO\u0000\u045a\u045b\u0001\u0000\u0000"+ + "\u0000\u045b\u045c\u0006o\u0019\u0000\u045c\u00ef\u0001\u0000\u0000\u0000"+ + "\u045d\u0462\u0003L\u001e\u0000\u045e\u0462\u0003J\u001d\u0000\u045f\u0462"+ + "\u0003Z%\u0000\u0460\u0462\u0003\u00a6K\u0000\u0461\u045d\u0001\u0000"+ + "\u0000\u0000\u0461\u045e\u0001\u0000\u0000\u0000\u0461\u045f\u0001\u0000"+ + "\u0000\u0000\u0461\u0460\u0001\u0000\u0000\u0000\u0462\u00f1\u0001\u0000"+ + "\u0000\u0000\u0463\u0466\u0003L\u001e\u0000\u0464\u0466\u0003\u00a6K\u0000"+ + "\u0465\u0463\u0001\u0000\u0000\u0000\u0465\u0464\u0001\u0000\u0000\u0000"+ + "\u0466\u046a\u0001\u0000\u0000\u0000\u0467\u0469\u0003\u00f0p\u0000\u0468"+ + "\u0467\u0001\u0000\u0000\u0000\u0469\u046c\u0001\u0000\u0000\u0000\u046a"+ + "\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b"+ + "\u0477\u0001\u0000\u0000\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046d"+ + "\u0470\u0003Z%\u0000\u046e\u0470\u0003T\"\u0000\u046f\u046d\u0001\u0000"+ + "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0472\u0001\u0000"+ + "\u0000\u0000\u0471\u0473\u0003\u00f0p\u0000\u0472\u0471\u0001\u0000\u0000"+ + "\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0472\u0001\u0000\u0000"+ + "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0477\u0001\u0000\u0000"+ + "\u0000\u0476\u0465\u0001\u0000\u0000\u0000\u0476\u046f\u0001\u0000\u0000"+ + "\u0000\u0477\u00f3\u0001\u0000\u0000\u0000\u0478\u047b\u0003\u00f2q\u0000"+ + "\u0479\u047b\u0003\u00b6S\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047a"+ + "\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c"+ + 
"\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d"+ + "\u00f5\u0001\u0000\u0000\u0000\u047e\u047f\u0003B\u0019\u0000\u047f\u0480"+ + "\u0001\u0000\u0000\u0000\u0480\u0481\u0006s\u000b\u0000\u0481\u00f7\u0001"+ + "\u0000\u0000\u0000\u0482\u0483\u0003D\u001a\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0006t\u000b\u0000\u0485\u00f9\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0003F\u001b\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0006u\u000b\u0000\u0489\u00fb\u0001\u0000\u0000\u0000\u048a"+ + "\u048b\u0003H\u001c\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d"+ + "\u0006v\u0010\u0000\u048d\u048e\u0006v\f\u0000\u048e\u00fd\u0001\u0000"+ + "\u0000\u0000\u048f\u0490\u0003j-\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + "\u0491\u0492\u0006w\u0014\u0000\u0492\u00ff\u0001\u0000\u0000\u0000\u0493"+ + "\u0494\u0003p0\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ + "x\u0013\u0000\u0496\u0101\u0001\u0000\u0000\u0000\u0497\u0498\u0003t2"+ + "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006y\u0017\u0000"+ + "\u049a\u0103\u0001\u0000\u0000\u0000\u049b\u049c\u0004z\n\u0000\u049c"+ + "\u049d\u0003\u008c>\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f"+ + "\u0006z\u0018\u0000\u049f\u0105\u0001\u0000\u0000\u0000\u04a0\u04a1\u0004"+ + "{\u000b\u0000\u04a1\u04a2\u0003\u00aeO\u0000\u04a2\u04a3\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0006{\u0019\u0000\u04a4\u0107\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0007\f\u0000\u0000\u04a6\u04a7\u0007\u0002\u0000\u0000\u04a7"+ + "\u0109\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003\u00f4r\u0000\u04a9\u04aa"+ + "\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006}\u001a\u0000\u04ab\u010b\u0001"+ + "\u0000\u0000\u0000\u04ac\u04ad\u0003B\u0019\u0000\u04ad\u04ae\u0001\u0000"+ + "\u0000\u0000\u04ae\u04af\u0006~\u000b\u0000\u04af\u010d\u0001\u0000\u0000"+ + "\u0000\u04b0\u04b1\u0003D\u001a\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000"+ + "\u04b2\u04b3\u0006\u007f\u000b\u0000\u04b3\u010f\u0001\u0000\u0000\u0000"+ + "\u04b4\u04b5\u0003F\u001b\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0006\u0080\u000b\u0000\u04b7\u0111\u0001\u0000\u0000\u0000\u04b8"+ + "\u04b9\u0003H\u001c\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + "\u0006\u0081\u0010\u0000\u04bb\u04bc\u0006\u0081\f\u0000\u04bc\u0113\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0003\u00b0P\u0000\u04be\u04bf\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0006\u0082\u000e\u0000\u04c0\u04c1\u0006\u0082"+ + "\u001b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000\u04c2\u04c3\u0007\u0007"+ + "\u0000\u0000\u04c3\u04c4\u0007\t\u0000\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u0083\u001c\u0000\u04c6\u0117\u0001\u0000\u0000"+ + "\u0000\u04c7\u04c8\u0007\u0013\u0000\u0000\u04c8\u04c9\u0007\u0001\u0000"+ + "\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000\u04ca\u04cb\u0007\n\u0000\u0000"+ + "\u04cb\u04cc\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000"+ + "\u04cd\u0119\u0001\u0000\u0000\u0000\u04ce\u04cf\b#\u0000\u0000\u04cf"+ + "\u011b\u0001\u0000\u0000\u0000\u04d0\u04d2\u0003\u011a\u0085\u0000\u04d1"+ + "\u04d0\u0001\u0000\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3"+ + "\u04d1\u0001\u0000\u0000\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ + "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003n/\u0000\u04d6\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d7\u04d1\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d8\u04da\u0001\u0000\u0000\u0000\u04d9\u04db\u0003"+ + 
"\u011a\u0085\u0000\u04da\u04d9\u0001\u0000\u0000\u0000\u04db\u04dc\u0001"+ + "\u0000\u0000\u0000\u04dc\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001"+ + "\u0000\u0000\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\u0003"+ + "\u011c\u0086\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1\u0006"+ + "\u0087\u001d\u0000\u04e1\u011f\u0001\u0000\u0000\u0000\u04e2\u04e3\u0003"+ + "B\u0019\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006\u0088"+ + "\u000b\u0000\u04e5\u0121\u0001\u0000\u0000\u0000\u04e6\u04e7\u0003D\u001a"+ + "\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8\u04e9\u0006\u0089\u000b"+ + "\u0000\u04e9\u0123\u0001\u0000\u0000\u0000\u04ea\u04eb\u0003F\u001b\u0000"+ + "\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed\u0006\u008a\u000b\u0000"+ + "\u04ed\u0125\u0001\u0000\u0000\u0000\u04ee\u04ef\u0003H\u001c\u0000\u04ef"+ + "\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006\u008b\u0010\u0000\u04f1"+ + "\u04f2\u0006\u008b\f\u0000\u04f2\u04f3\u0006\u008b\f\u0000\u04f3\u0127"+ + "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003j-\u0000\u04f5\u04f6\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0006\u008c\u0014\u0000\u04f7\u0129\u0001\u0000"+ + "\u0000\u0000\u04f8\u04f9\u0003p0\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ + "\u04fa\u04fb\u0006\u008d\u0013\u0000\u04fb\u012b\u0001\u0000\u0000\u0000"+ + "\u04fc\u04fd\u0003t2\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff"+ + "\u0006\u008e\u0017\u0000\u04ff\u012d\u0001\u0000\u0000\u0000\u0500\u0501"+ + "\u0003\u0118\u0084\u0000\u0501\u0502\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u0006\u008f\u001e\u0000\u0503\u012f\u0001\u0000\u0000\u0000\u0504\u0505"+ + "\u0003\u00f4r\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507\u0006"+ + "\u0090\u001a\u0000\u0507\u0131\u0001\u0000\u0000\u0000\u0508\u0509\u0003"+ + "\u00b8T\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006\u0091"+ + "\u001f\u0000\u050b\u0133\u0001\u0000\u0000\u0000\u050c\u050d\u0004\u0092"+ + "\f\u0000\u050d\u050e\u0003\u008c>\u0000\u050e\u050f\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0006\u0092\u0018\u0000\u0510\u0135\u0001\u0000\u0000"+ + "\u0000\u0511\u0512\u0004\u0093\r\u0000\u0512\u0513\u0003\u00aeO\u0000"+ + "\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u0093\u0019\u0000"+ + "\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0003B\u0019\u0000\u0517"+ + "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0094\u000b\u0000\u0519"+ + "\u0139\u0001\u0000\u0000\u0000\u051a\u051b\u0003D\u001a\u0000\u051b\u051c"+ + "\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u0095\u000b\u0000\u051d\u013b"+ + "\u0001\u0000\u0000\u0000\u051e\u051f\u0003F\u001b\u0000\u051f\u0520\u0001"+ + "\u0000\u0000\u0000\u0520\u0521\u0006\u0096\u000b\u0000\u0521\u013d\u0001"+ + "\u0000\u0000\u0000\u0522\u0523\u0003H\u001c\u0000\u0523\u0524\u0001\u0000"+ + "\u0000\u0000\u0524\u0525\u0006\u0097\u0010\u0000\u0525\u0526\u0006\u0097"+ + "\f\u0000\u0526\u013f\u0001\u0000\u0000\u0000\u0527\u0528\u0003t2\u0000"+ + "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u0098\u0017\u0000"+ + "\u052a\u0141\u0001\u0000\u0000\u0000\u052b\u052c\u0004\u0099\u000e\u0000"+ + "\u052c\u052d\u0003\u008c>\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e"+ + "\u052f\u0006\u0099\u0018\u0000\u052f\u0143\u0001\u0000\u0000\u0000\u0530"+ + "\u0531\u0004\u009a\u000f\u0000\u0531\u0532\u0003\u00aeO\u0000\u0532\u0533"+ + "\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0019\u0000\u0534\u0145"+ + "\u0001\u0000\u0000\u0000\u0535\u0536\u0003\u00b8T\u0000\u0536\u0537\u0001"+ + 
"\u0000\u0000\u0000\u0537\u0538\u0006\u009b\u001f\u0000\u0538\u0147\u0001"+ + "\u0000\u0000\u0000\u0539\u053a\u0003\u00b4R\u0000\u053a\u053b\u0001\u0000"+ + "\u0000\u0000\u053b\u053c\u0006\u009c \u0000\u053c\u0149\u0001\u0000\u0000"+ + "\u0000\u053d\u053e\u0003B\u0019\u0000\u053e\u053f\u0001\u0000\u0000\u0000"+ + "\u053f\u0540\u0006\u009d\u000b\u0000\u0540\u014b\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0003D\u001a\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543"+ + "\u0544\u0006\u009e\u000b\u0000\u0544\u014d\u0001\u0000\u0000\u0000\u0545"+ + "\u0546\u0003F\u001b\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548"+ + "\u0006\u009f\u000b\u0000\u0548\u014f\u0001\u0000\u0000\u0000\u0549\u054a"+ + "\u0003H\u001c\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006"+ + "\u00a0\u0010\u0000\u054c\u054d\u0006\u00a0\f\u0000\u054d\u0151\u0001\u0000"+ + "\u0000\u0000\u054e\u054f\u0007\u0001\u0000\u0000\u054f\u0550\u0007\t\u0000"+ + "\u0000\u0550\u0551\u0007\u000f\u0000\u0000\u0551\u0552\u0007\u0007\u0000"+ + "\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553\u0554\u0003B\u0019\u0000"+ + "\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556\u0006\u00a2\u000b\u0000"+ + "\u0556\u0155\u0001\u0000\u0000\u0000\u0557\u0558\u0003D\u001a\u0000\u0558"+ + "\u0559\u0001\u0000\u0000\u0000\u0559\u055a\u0006\u00a3\u000b\u0000\u055a"+ + "\u0157\u0001\u0000\u0000\u0000\u055b\u055c\u0003F\u001b\u0000\u055c\u055d"+ + "\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u00a4\u000b\u0000\u055e\u0159"+ + "\u0001\u0000\u0000\u0000\u055f\u0560\u0003\u00b2Q\u0000\u0560\u0561\u0001"+ + "\u0000\u0000\u0000\u0561\u0562\u0006\u00a5\u0011\u0000\u0562\u0563\u0006"+ + "\u00a5\f\u0000\u0563\u015b\u0001\u0000\u0000\u0000\u0564\u0565\u0003n"+ + "/\u0000\u0565\u0566\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00a6\u0012"+ + "\u0000\u0567\u015d\u0001\u0000\u0000\u0000\u0568\u056e\u0003T\"\u0000"+ + "\u0569\u056e\u0003J\u001d\u0000\u056a\u056e\u0003t2\u0000\u056b\u056e"+ + "\u0003L\u001e\u0000\u056c\u056e\u0003Z%\u0000\u056d\u0568\u0001\u0000"+ + "\u0000\u0000\u056d\u0569\u0001\u0000\u0000\u0000\u056d\u056a\u0001\u0000"+ + "\u0000\u0000\u056d\u056b\u0001\u0000\u0000\u0000\u056d\u056c\u0001\u0000"+ + "\u0000\u0000\u056e\u056f\u0001\u0000\u0000\u0000\u056f\u056d\u0001\u0000"+ + "\u0000\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u015f\u0001\u0000"+ + "\u0000\u0000\u0571\u0572\u0003B\u0019\u0000\u0572\u0573\u0001\u0000\u0000"+ + "\u0000\u0573\u0574\u0006\u00a8\u000b\u0000\u0574\u0161\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0003D\u001a\u0000\u0576\u0577\u0001\u0000\u0000\u0000"+ + "\u0577\u0578\u0006\u00a9\u000b\u0000\u0578\u0163\u0001\u0000\u0000\u0000"+ + "\u0579\u057a\u0003F\u001b\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ + "\u057c\u0006\u00aa\u000b\u0000\u057c\u0165\u0001\u0000\u0000\u0000\u057d"+ + "\u057e\u0003H\u001c\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580"+ + "\u0006\u00ab\u0010\u0000\u0580\u0581\u0006\u00ab\f\u0000\u0581\u0167\u0001"+ + "\u0000\u0000\u0000\u0582\u0583\u0003n/\u0000\u0583\u0584\u0001\u0000\u0000"+ + "\u0000\u0584\u0585\u0006\u00ac\u0012\u0000\u0585\u0169\u0001\u0000\u0000"+ + "\u0000\u0586\u0587\u0003p0\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ + "\u0589\u0006\u00ad\u0013\u0000\u0589\u016b\u0001\u0000\u0000\u0000\u058a"+ + "\u058b\u0003t2\u0000\u058b\u058c\u0001\u0000\u0000\u0000\u058c\u058d\u0006"+ + "\u00ae\u0017\u0000\u058d\u016d\u0001\u0000\u0000\u0000\u058e\u058f\u0003"+ + "\u0116\u0083\u0000\u058f\u0590\u0001\u0000\u0000\u0000\u0590\u0591\u0006"+ + 
"\u00af!\u0000\u0591\u0592\u0006\u00af\"\u0000\u0592\u016f\u0001\u0000"+ + "\u0000\u0000\u0593\u0594\u0003\u00dae\u0000\u0594\u0595\u0001\u0000\u0000"+ + "\u0000\u0595\u0596\u0006\u00b0\u0015\u0000\u0596\u0171\u0001\u0000\u0000"+ + "\u0000\u0597\u0598\u0003^\'\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ + "\u0599\u059a\u0006\u00b1\u0016\u0000\u059a\u0173\u0001\u0000\u0000\u0000"+ + "\u059b\u059c\u0003B\u0019\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ + "\u059e\u0006\u00b2\u000b\u0000\u059e\u0175\u0001\u0000\u0000\u0000\u059f"+ + "\u05a0\u0003D\u001a\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2"+ + "\u0006\u00b3\u000b\u0000\u05a2\u0177\u0001\u0000\u0000\u0000\u05a3\u05a4"+ + "\u0003F\u001b\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006"+ + "\u00b4\u000b\u0000\u05a6\u0179\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003"+ + "H\u001c\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b5"+ + "\u0010\u0000\u05aa\u05ab\u0006\u00b5\f\u0000\u05ab\u05ac\u0006\u00b5\f"+ + "\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003p0\u0000\u05ae"+ + "\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006\u00b6\u0013\u0000\u05b0"+ + "\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003t2\u0000\u05b2\u05b3\u0001"+ + "\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7\u0017\u0000\u05b4\u017f\u0001"+ + "\u0000\u0000\u0000\u05b5\u05b6\u0003\u00f4r\u0000\u05b6\u05b7\u0001\u0000"+ + "\u0000\u0000\u05b7\u05b8\u0006\u00b8\u001a\u0000\u05b8\u0181\u0001\u0000"+ + "\u0000\u0000\u05b9\u05ba\u0003B\u0019\u0000\u05ba\u05bb\u0001\u0000\u0000"+ + "\u0000\u05bb\u05bc\u0006\u00b9\u000b\u0000\u05bc\u0183\u0001\u0000\u0000"+ + "\u0000\u05bd\u05be\u0003D\u001a\u0000\u05be\u05bf\u0001\u0000\u0000\u0000"+ + "\u05bf\u05c0\u0006\u00ba\u000b\u0000\u05c0\u0185\u0001\u0000\u0000\u0000"+ + "\u05c1\u05c2\u0003F\u001b\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3"+ + "\u05c4\u0006\u00bb\u000b\u0000\u05c4\u0187\u0001\u0000\u0000\u0000\u05c5"+ + "\u05c6\u0003H\u001c\u0000\u05c6\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8"+ + "\u0006\u00bc\u0010\u0000\u05c8\u05c9\u0006\u00bc\f\u0000\u05c9\u0189\u0001"+ + "\u0000\u0000\u0000\u05ca\u05cb\u00036\u0013\u0000\u05cb\u05cc\u0001\u0000"+ + "\u0000\u0000\u05cc\u05cd\u0006\u00bd#\u0000\u05cd\u018b\u0001\u0000\u0000"+ + "\u0000\u05ce\u05cf\u0003\u0108|\u0000\u05cf\u05d0\u0001\u0000\u0000\u0000"+ + "\u05d0\u05d1\u0006\u00be$\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2"+ + "\u05d3\u0003\u0116\u0083\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4"+ + "\u05d5\u0006\u00bf!\u0000\u05d5\u05d6\u0006\u00bf\f\u0000\u05d6\u05d7"+ + "\u0006\u00bf\u0000\u0000\u05d7\u018f\u0001\u0000\u0000\u0000\u05d8\u05d9"+ + "\u0007\u0014\u0000\u0000\u05d9\u05da\u0007\u0002\u0000\u0000\u05da\u05db"+ + "\u0007\u0001\u0000\u0000\u05db\u05dc\u0007\t\u0000\u0000\u05dc\u05dd\u0007"+ + "\u0011\u0000\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de\u05df\u0006"+ + "\u00c0\f\u0000\u05df\u05e0\u0006\u00c0\u0000\u0000\u05e0\u0191\u0001\u0000"+ + "\u0000\u0000\u05e1\u05e2\u0003\u00b4R\u0000\u05e2\u05e3\u0001\u0000\u0000"+ + "\u0000\u05e3\u05e4\u0006\u00c1 \u0000\u05e4\u0193\u0001\u0000\u0000\u0000"+ + "\u05e5\u05e6\u0003\u00b8T\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7"+ + "\u05e8\u0006\u00c2\u001f\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9"+ + "\u05ea\u0003B\u0019\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec"+ + "\u0006\u00c3\u000b\u0000\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee"+ + "\u0003D\u001a\u0000\u05ee\u05ef\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006"+ + 
"\u00c4\u000b\u0000\u05f0\u0199\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003"+ + "F\u001b\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5"+ + "\u000b\u0000\u05f4\u019b\u0001\u0000\u0000\u0000\u05f5\u05f6\u0003H\u001c"+ + "\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7\u05f8\u0006\u00c6\u0010"+ + "\u0000\u05f8\u05f9\u0006\u00c6\f\u0000\u05f9\u019d\u0001\u0000\u0000\u0000"+ + "\u05fa\u05fb\u0003\u00dae\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc"+ + "\u05fd\u0006\u00c7\u0015\u0000\u05fd\u05fe\u0006\u00c7\f\u0000\u05fe\u05ff"+ + "\u0006\u00c7%\u0000\u05ff\u019f\u0001\u0000\u0000\u0000\u0600\u0601\u0003"+ + "^\'\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006\u00c8"+ + "\u0016\u0000\u0603\u0604\u0006\u00c8\f\u0000\u0604\u0605\u0006\u00c8%"+ + "\u0000\u0605\u01a1\u0001\u0000\u0000\u0000\u0606\u0607\u0003B\u0019\u0000"+ + "\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609\u0006\u00c9\u000b\u0000"+ + "\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003D\u001a\u0000\u060b"+ + "\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca\u000b\u0000\u060d"+ + "\u01a5\u0001\u0000\u0000\u0000\u060e\u060f\u0003F\u001b\u0000\u060f\u0610"+ + "\u0001\u0000\u0000\u0000\u0610\u0611\u0006\u00cb\u000b\u0000\u0611\u01a7"+ + "\u0001\u0000\u0000\u0000\u0612\u0613\u0003n/\u0000\u0613\u0614\u0001\u0000"+ + "\u0000\u0000\u0614\u0615\u0006\u00cc\u0012\u0000\u0615\u0616\u0006\u00cc"+ + "\f\u0000\u0616\u0617\u0006\u00cc\t\u0000\u0617\u01a9\u0001\u0000\u0000"+ + "\u0000\u0618\u0619\u0003p0\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a"+ + "\u061b\u0006\u00cd\u0013\u0000\u061b\u061c\u0006\u00cd\f\u0000\u061c\u061d"+ + "\u0006\u00cd\t\u0000\u061d\u01ab\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ + "B\u0019\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00ce"+ + "\u000b\u0000\u0621\u01ad\u0001\u0000\u0000\u0000\u0622\u0623\u0003D\u001a"+ + "\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624\u0625\u0006\u00cf\u000b"+ + "\u0000\u0625\u01af\u0001\u0000\u0000\u0000\u0626\u0627\u0003F\u001b\u0000"+ + "\u0627\u0628\u0001\u0000\u0000\u0000\u0628\u0629\u0006\u00d0\u000b\u0000"+ + "\u0629\u01b1\u0001\u0000\u0000\u0000\u062a\u062b\u0003\u00b8T\u0000\u062b"+ + "\u062c\u0001\u0000\u0000\u0000\u062c\u062d\u0006\u00d1\f\u0000\u062d\u062e"+ + "\u0006\u00d1\u0000\u0000\u062e\u062f\u0006\u00d1\u001f\u0000\u062f\u01b3"+ + "\u0001\u0000\u0000\u0000\u0630\u0631\u0003\u00b4R\u0000\u0631\u0632\u0001"+ + "\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\f\u0000\u0633\u0634\u0006\u00d2"+ + "\u0000\u0000\u0634\u0635\u0006\u00d2 \u0000\u0635\u01b5\u0001\u0000\u0000"+ + "\u0000\u0636\u0637\u0003d*\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638"+ + "\u0639\u0006\u00d3\f\u0000\u0639\u063a\u0006\u00d3\u0000\u0000\u063a\u063b"+ + "\u0006\u00d3&\u0000\u063b\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003"+ + "H\u001c\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4"+ + "\u0010\u0000\u063f\u0640\u0006\u00d4\f\u0000\u0640\u01b9\u0001\u0000\u0000"+ + "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u000e\u000f\u028f\u0299\u029d\u02a0\u02a9\u02ab\u02b6\u02c9\u02ce\u02d7"+ + "\u02de\u02e3\u02e5\u02f0\u02f8\u02fb\u02fd\u0302\u0307\u030d\u0314\u0319"+ + "\u031f\u0322\u032a\u032e\u03ae\u03b3\u03ba\u03bc\u03cc\u03d1\u03d6\u03d8"+ + "\u03de\u042b\u0430\u0461\u0465\u046a\u046f\u0474\u0476\u047a\u047c\u04d3"+ + "\u04d7\u04dc\u056d\u056f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + 
"\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ + "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007F\u0000\u0005\u0000"+ + "\u0000\u0007\u001d\u0000\u0007G\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ + "$\u0000\u0007Q\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ + "E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000\u0007"+ + "^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007\u0014"+ + "\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a2b339f378f12..50493f584fe4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -23,7 +23,11 @@ null null null null -':' +null +null +null +null +null '|' null null @@ -33,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' @@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,11 +150,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -155,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -305,7 +323,11 @@ enrichCommand enrichWithClause lookupCommand inlinestatsCommand +joinCommand +joinTarget +joinCondition +joinPredicate atn: -[4, 1, 119, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 
8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 298, 8, 15, 10, 15, 12, 15, 301, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 314, 8, 17, 10, 17, 12, 17, 317, 9, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 335, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 341, 8, 22, 10, 22, 12, 22, 344, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 354, 8, 24, 10, 24, 12, 24, 357, 9, 24, 1, 24, 3, 24, 360, 8, 24, 1, 24, 1, 24, 3, 24, 364, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 371, 8, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 380, 8, 27, 10, 27, 12, 27, 383, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 388, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 393, 8, 29, 10, 29, 12, 29, 396, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 409, 8, 31, 10, 31, 12, 31, 412, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 419, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 434, 8, 34, 10, 34, 12, 34, 437, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 445, 8, 34, 10, 34, 12, 34, 448, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 456, 8, 34, 10, 34, 12, 34, 459, 9, 34, 1, 34, 1, 34, 3, 34, 463, 8, 34, 1, 35, 1, 35, 3, 35, 467, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 628, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 
20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 287, 1, 0, 0, 0, 26, 289, 1, 0, 0, 0, 28, 291, 1, 0, 0, 0, 30, 294, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 309, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 328, 1, 0, 0, 0, 40, 330, 1, 0, 0, 0, 42, 334, 1, 0, 0, 0, 44, 336, 1, 0, 0, 0, 46, 345, 1, 0, 0, 0, 48, 349, 1, 0, 0, 0, 50, 365, 1, 0, 0, 0, 52, 368, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 384, 1, 0, 0, 0, 58, 389, 1, 0, 0, 0, 60, 397, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 413, 1, 0, 0, 0, 66, 418, 1, 0, 0, 0, 68, 462, 1, 0, 0, 0, 70, 466, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 25, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 44, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 44, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 39, 0, 0, 177, 178, 5, 43, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 34, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 50, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 40, 0, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 45, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 30, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 47, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 
0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 44, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 42, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 44, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 49, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 58, 29, 0, 227, 228, 5, 24, 0, 0, 228, 229, 3, 68, 34, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 43, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 50, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 33, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 43, 0, 0, 274, 284, 5, 61, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 34, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 50, 0, 0, 286, 23, 1, 0, 0, 0, 287, 288, 3, 72, 36, 0, 288, 25, 1, 0, 0, 0, 289, 290, 3, 64, 32, 0, 290, 27, 1, 0, 0, 0, 291, 292, 5, 12, 0, 0, 292, 293, 3, 30, 15, 0, 293, 29, 1, 0, 0, 0, 294, 299, 3, 32, 16, 0, 295, 296, 5, 34, 0, 0, 296, 298, 3, 32, 16, 0, 297, 295, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 31, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 302, 303, 3, 58, 29, 0, 303, 304, 5, 32, 0, 0, 304, 306, 1, 0, 0, 0, 305, 302, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 3, 10, 5, 0, 308, 33, 1, 0, 0, 0, 309, 310, 5, 6, 0, 0, 310, 315, 3, 36, 18, 0, 311, 312, 5, 34, 0, 0, 312, 314, 3, 36, 18, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 320, 3, 42, 21, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 3, 38, 19, 0, 322, 323, 5, 24, 0, 0, 323, 325, 1, 0, 0, 0, 324, 321, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 327, 3, 40, 20, 0, 327, 37, 1, 0, 0, 0, 328, 329, 5, 76, 0, 0, 329, 39, 1, 0, 0, 0, 330, 331, 7, 2, 0, 0, 331, 41, 1, 0, 0, 0, 332, 335, 3, 44, 22, 0, 333, 335, 3, 46, 23, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, 0, 335, 43, 1, 0, 0, 0, 336, 337, 5, 75, 0, 0, 337, 342, 5, 76, 0, 0, 338, 339, 5, 34, 0, 0, 339, 341, 5, 76, 0, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 
0, 0, 342, 343, 1, 0, 0, 0, 343, 45, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 65, 0, 0, 346, 347, 3, 44, 22, 0, 347, 348, 5, 66, 0, 0, 348, 47, 1, 0, 0, 0, 349, 350, 5, 19, 0, 0, 350, 355, 3, 36, 18, 0, 351, 352, 5, 34, 0, 0, 352, 354, 3, 36, 18, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 360, 3, 54, 27, 0, 359, 358, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 363, 1, 0, 0, 0, 361, 362, 5, 29, 0, 0, 362, 364, 3, 30, 15, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 49, 1, 0, 0, 0, 365, 366, 5, 4, 0, 0, 366, 367, 3, 30, 15, 0, 367, 51, 1, 0, 0, 0, 368, 370, 5, 15, 0, 0, 369, 371, 3, 54, 27, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 29, 0, 0, 373, 375, 3, 30, 15, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 381, 3, 56, 28, 0, 377, 378, 5, 34, 0, 0, 378, 380, 3, 56, 28, 0, 379, 377, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 55, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 384, 387, 3, 32, 16, 0, 385, 386, 5, 16, 0, 0, 386, 388, 3, 10, 5, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 394, 3, 72, 36, 0, 390, 391, 5, 36, 0, 0, 391, 393, 3, 72, 36, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 402, 3, 66, 33, 0, 398, 399, 5, 36, 0, 0, 399, 401, 3, 66, 33, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 410, 3, 60, 30, 0, 406, 407, 5, 34, 0, 0, 407, 409, 3, 60, 30, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 63, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 7, 3, 0, 0, 414, 65, 1, 0, 0, 0, 415, 419, 5, 80, 0, 0, 416, 417, 4, 33, 10, 0, 417, 419, 3, 70, 35, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 419, 67, 1, 0, 0, 0, 420, 463, 5, 45, 0, 0, 421, 422, 3, 104, 52, 0, 422, 423, 5, 67, 0, 0, 423, 463, 1, 0, 0, 0, 424, 463, 3, 102, 51, 0, 425, 463, 3, 104, 52, 0, 426, 463, 3, 98, 49, 0, 427, 463, 3, 70, 35, 0, 428, 463, 3, 106, 53, 0, 429, 430, 5, 65, 0, 0, 430, 435, 3, 100, 50, 0, 431, 432, 5, 34, 0, 0, 432, 434, 3, 100, 50, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 438, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 66, 0, 0, 439, 463, 1, 0, 0, 0, 440, 441, 5, 65, 0, 0, 441, 446, 3, 98, 49, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 98, 49, 0, 444, 442, 1, 0, 0, 0, 445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 449, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 5, 66, 0, 0, 450, 463, 1, 0, 0, 0, 451, 452, 5, 65, 0, 0, 452, 457, 3, 106, 53, 0, 453, 454, 5, 34, 0, 0, 454, 456, 3, 106, 53, 0, 455, 453, 1, 0, 0, 0, 456, 459, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 460, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 460, 461, 5, 66, 0, 0, 461, 463, 1, 0, 0, 0, 462, 420, 1, 0, 0, 0, 462, 421, 1, 0, 0, 0, 462, 424, 1, 0, 0, 0, 462, 425, 1, 0, 0, 0, 462, 426, 1, 0, 0, 0, 462, 427, 1, 0, 0, 0, 462, 428, 1, 0, 0, 0, 462, 429, 1, 0, 0, 0, 462, 440, 1, 0, 0, 0, 462, 451, 1, 0, 0, 0, 463, 69, 1, 0, 0, 0, 464, 467, 5, 48, 0, 0, 465, 467, 5, 64, 0, 0, 466, 464, 1, 0, 0, 0, 466, 465, 1, 0, 0, 0, 467, 71, 1, 0, 0, 0, 468, 472, 3, 64, 32, 0, 469, 470, 4, 36, 11, 0, 470, 472, 3, 70, 35, 0, 471, 468, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 
475, 5, 27, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 34, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 46, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 34, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 34, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 32, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 28, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 27, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 34, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 32, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 29, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 58, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 299, 305, 315, 319, 324, 334, 342, 355, 359, 363, 370, 374, 381, 387, 394, 402, 410, 418, 435, 446, 457, 462, 466, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file +[4, 1, 128, 635, 2, 0, 7, 0, 
2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 344, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 350, 8, 22, 10, 22, 12, 22, 353, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 363, 8, 24, 10, 24, 12, 24, 366, 9, 24, 1, 24, 3, 24, 369, 8, 24, 1, 24, 1, 24, 3, 24, 373, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 380, 8, 26, 1, 26, 1, 26, 3, 26, 384, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 389, 8, 27, 10, 27, 12, 27, 392, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 397, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 402, 8, 29, 10, 29, 12, 29, 405, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 410, 8, 30, 10, 30, 12, 30, 413, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 418, 8, 31, 10, 31, 12, 31, 421, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 428, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 454, 8, 34, 10, 34, 12, 34, 457, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 465, 8, 34, 10, 34, 12, 34, 468, 9, 34, 1, 34, 1, 34, 3, 34, 472, 8, 34, 1, 35, 1, 35, 3, 35, 476, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 481, 8, 36, 1, 37, 1, 37, 
1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 490, 8, 38, 10, 38, 12, 38, 493, 9, 38, 1, 39, 1, 39, 3, 39, 497, 8, 39, 1, 39, 1, 39, 3, 39, 501, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 513, 8, 42, 10, 42, 12, 42, 516, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 526, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 538, 8, 47, 10, 47, 12, 47, 541, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 551, 8, 50, 1, 51, 3, 51, 554, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 559, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 581, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 587, 8, 58, 10, 58, 12, 58, 590, 9, 58, 3, 58, 592, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 597, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 610, 8, 61, 1, 62, 3, 62, 613, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 622, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 628, 8, 64, 10, 64, 12, 64, 631, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 660, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 343, 1, 0, 0, 0, 44, 345, 1, 0, 0, 0, 46, 354, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 377, 1, 0, 0, 0, 54, 385, 1, 0, 0, 0, 56, 393, 1, 0, 0, 0, 58, 398, 1, 0, 0, 0, 60, 406, 1, 0, 0, 0, 62, 414, 1, 0, 0, 0, 64, 422, 1, 0, 0, 0, 66, 427, 1, 0, 0, 0, 68, 471, 1, 0, 0, 0, 70, 475, 1, 0, 0, 0, 72, 480, 1, 0, 0, 0, 74, 482, 1, 0, 0, 0, 76, 485, 1, 0, 0, 0, 78, 494, 1, 0, 0, 0, 80, 502, 1, 0, 0, 0, 82, 505, 1, 0, 0, 0, 84, 508, 1, 0, 0, 0, 86, 517, 1, 0, 0, 0, 88, 521, 1, 0, 0, 0, 90, 527, 1, 0, 0, 0, 92, 531, 1, 0, 0, 0, 94, 534, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 98, 546, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 553, 1, 0, 0, 0, 104, 558, 1, 0, 0, 0, 106, 562, 1, 0, 0, 0, 108, 564, 1, 0, 0, 0, 110, 566, 1, 0, 0, 0, 112, 569, 1, 0, 0, 0, 114, 573, 1, 0, 0, 0, 116, 576, 1, 0, 0, 0, 118, 596, 1, 0, 0, 0, 120, 600, 1, 0, 0, 0, 122, 605, 1, 0, 0, 0, 124, 612, 1, 0, 0, 0, 126, 618, 1, 0, 0, 0, 128, 623, 1, 0, 0, 0, 130, 632, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 
147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 236, 3, 58, 29, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 68, 34, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 108, 54, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 68, 34, 0, 265, 272, 3, 58, 29, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 
0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 72, 36, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 64, 32, 0, 299, 27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 58, 29, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 344, 3, 44, 22, 0, 342, 344, 3, 46, 23, 0, 343, 341, 1, 0, 0, 0, 343, 342, 1, 0, 0, 0, 344, 43, 1, 0, 0, 0, 345, 346, 5, 80, 0, 0, 346, 351, 5, 81, 0, 0, 347, 348, 5, 39, 0, 0, 348, 350, 5, 81, 0, 0, 349, 347, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 45, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 354, 355, 5, 70, 0, 0, 355, 356, 3, 44, 22, 0, 356, 357, 5, 71, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 19, 0, 0, 359, 364, 3, 36, 18, 0, 360, 361, 5, 39, 0, 0, 361, 363, 3, 36, 18, 0, 362, 360, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 367, 369, 3, 54, 27, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 33, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 49, 1, 0, 0, 0, 374, 375, 5, 4, 0, 0, 375, 376, 3, 30, 15, 0, 376, 51, 1, 0, 0, 0, 377, 379, 5, 15, 0, 0, 378, 380, 3, 54, 27, 0, 379, 378, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 382, 5, 33, 0, 0, 382, 384, 3, 30, 15, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 53, 1, 0, 0, 0, 385, 390, 3, 56, 28, 0, 386, 387, 5, 39, 0, 0, 387, 389, 3, 56, 28, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 55, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 396, 3, 32, 16, 0, 394, 395, 5, 16, 0, 0, 395, 397, 3, 10, 5, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 57, 1, 0, 0, 0, 398, 403, 3, 72, 36, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 72, 36, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 59, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 66, 33, 0, 407, 408, 5, 41, 0, 0, 408, 410, 3, 66, 33, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 
411, 412, 1, 0, 0, 0, 412, 61, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 419, 3, 60, 30, 0, 415, 416, 5, 39, 0, 0, 416, 418, 3, 60, 30, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 7, 3, 0, 0, 423, 65, 1, 0, 0, 0, 424, 428, 5, 85, 0, 0, 425, 426, 4, 33, 10, 0, 426, 428, 3, 70, 35, 0, 427, 424, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 428, 67, 1, 0, 0, 0, 429, 472, 5, 50, 0, 0, 430, 431, 3, 104, 52, 0, 431, 432, 5, 72, 0, 0, 432, 472, 1, 0, 0, 0, 433, 472, 3, 102, 51, 0, 434, 472, 3, 104, 52, 0, 435, 472, 3, 98, 49, 0, 436, 472, 3, 70, 35, 0, 437, 472, 3, 106, 53, 0, 438, 439, 5, 70, 0, 0, 439, 444, 3, 100, 50, 0, 440, 441, 5, 39, 0, 0, 441, 443, 3, 100, 50, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 447, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 71, 0, 0, 448, 472, 1, 0, 0, 0, 449, 450, 5, 70, 0, 0, 450, 455, 3, 98, 49, 0, 451, 452, 5, 39, 0, 0, 452, 454, 3, 98, 49, 0, 453, 451, 1, 0, 0, 0, 454, 457, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 458, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 458, 459, 5, 71, 0, 0, 459, 472, 1, 0, 0, 0, 460, 461, 5, 70, 0, 0, 461, 466, 3, 106, 53, 0, 462, 463, 5, 39, 0, 0, 463, 465, 3, 106, 53, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 470, 5, 71, 0, 0, 470, 472, 1, 0, 0, 0, 471, 429, 1, 0, 0, 0, 471, 430, 1, 0, 0, 0, 471, 433, 1, 0, 0, 0, 471, 434, 1, 0, 0, 0, 471, 435, 1, 0, 0, 0, 471, 436, 1, 0, 0, 0, 471, 437, 1, 0, 0, 0, 471, 438, 1, 0, 0, 0, 471, 449, 1, 0, 0, 0, 471, 460, 1, 0, 0, 0, 472, 69, 1, 0, 0, 0, 473, 476, 5, 53, 0, 0, 474, 476, 5, 69, 0, 0, 475, 473, 1, 0, 0, 0, 475, 474, 1, 0, 0, 0, 476, 71, 1, 0, 0, 0, 477, 481, 3, 64, 32, 0, 478, 479, 4, 36, 11, 0, 479, 481, 3, 70, 35, 0, 480, 477, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 73, 1, 0, 0, 0, 482, 483, 5, 9, 0, 0, 483, 484, 5, 31, 0, 0, 484, 75, 1, 0, 0, 0, 485, 486, 5, 14, 0, 0, 486, 491, 3, 78, 39, 0, 487, 488, 5, 39, 0, 0, 488, 490, 3, 78, 39, 0, 489, 487, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 77, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 494, 496, 3, 10, 5, 0, 495, 497, 7, 4, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 500, 1, 0, 0, 0, 498, 499, 5, 51, 0, 0, 499, 501, 7, 5, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 79, 1, 0, 0, 0, 502, 503, 5, 8, 0, 0, 503, 504, 3, 62, 31, 0, 504, 81, 1, 0, 0, 0, 505, 506, 5, 2, 0, 0, 506, 507, 3, 62, 31, 0, 507, 83, 1, 0, 0, 0, 508, 509, 5, 11, 0, 0, 509, 514, 3, 86, 43, 0, 510, 511, 5, 39, 0, 0, 511, 513, 3, 86, 43, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 85, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 89, 0, 0, 519, 520, 3, 60, 30, 0, 520, 87, 1, 0, 0, 0, 521, 522, 5, 1, 0, 0, 522, 523, 3, 20, 10, 0, 523, 525, 3, 106, 53, 0, 524, 526, 3, 94, 47, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 89, 1, 0, 0, 0, 527, 528, 5, 7, 0, 0, 528, 529, 3, 20, 10, 0, 529, 530, 3, 106, 53, 0, 530, 91, 1, 0, 0, 0, 531, 532, 5, 10, 0, 0, 532, 533, 3, 58, 29, 0, 533, 93, 1, 0, 0, 0, 534, 539, 3, 96, 48, 0, 535, 536, 5, 39, 0, 0, 536, 538, 3, 96, 48, 0, 537, 535, 1, 0, 0, 0, 538, 541, 1, 0, 0, 0, 539, 537, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 95, 1, 0, 0, 0, 541, 539, 1, 0, 0, 0, 542, 543, 3, 64, 32, 0, 543, 544, 5, 36, 0, 0, 544, 545, 3, 68, 34, 0, 545, 97, 1, 0, 0, 0, 546, 547, 
7, 6, 0, 0, 547, 99, 1, 0, 0, 0, 548, 551, 3, 102, 51, 0, 549, 551, 3, 104, 52, 0, 550, 548, 1, 0, 0, 0, 550, 549, 1, 0, 0, 0, 551, 101, 1, 0, 0, 0, 552, 554, 7, 0, 0, 0, 553, 552, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 5, 32, 0, 0, 556, 103, 1, 0, 0, 0, 557, 559, 7, 0, 0, 0, 558, 557, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 5, 31, 0, 0, 561, 105, 1, 0, 0, 0, 562, 563, 5, 30, 0, 0, 563, 107, 1, 0, 0, 0, 564, 565, 7, 7, 0, 0, 565, 109, 1, 0, 0, 0, 566, 567, 5, 5, 0, 0, 567, 568, 3, 112, 56, 0, 568, 111, 1, 0, 0, 0, 569, 570, 5, 70, 0, 0, 570, 571, 3, 2, 1, 0, 571, 572, 5, 71, 0, 0, 572, 113, 1, 0, 0, 0, 573, 574, 5, 13, 0, 0, 574, 575, 5, 105, 0, 0, 575, 115, 1, 0, 0, 0, 576, 577, 5, 3, 0, 0, 577, 580, 5, 95, 0, 0, 578, 579, 5, 93, 0, 0, 579, 581, 3, 60, 30, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 591, 1, 0, 0, 0, 582, 583, 5, 94, 0, 0, 583, 588, 3, 118, 59, 0, 584, 585, 5, 39, 0, 0, 585, 587, 3, 118, 59, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 592, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 582, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 117, 1, 0, 0, 0, 593, 594, 3, 60, 30, 0, 594, 595, 5, 36, 0, 0, 595, 597, 1, 0, 0, 0, 596, 593, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 3, 60, 30, 0, 599, 119, 1, 0, 0, 0, 600, 601, 5, 18, 0, 0, 601, 602, 3, 36, 18, 0, 602, 603, 5, 93, 0, 0, 603, 604, 3, 62, 31, 0, 604, 121, 1, 0, 0, 0, 605, 606, 5, 17, 0, 0, 606, 609, 3, 54, 27, 0, 607, 608, 5, 33, 0, 0, 608, 610, 3, 30, 15, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 123, 1, 0, 0, 0, 611, 613, 7, 8, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 5, 20, 0, 0, 615, 616, 3, 126, 63, 0, 616, 617, 3, 128, 64, 0, 617, 125, 1, 0, 0, 0, 618, 621, 3, 64, 32, 0, 619, 620, 5, 89, 0, 0, 620, 622, 3, 64, 32, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 127, 1, 0, 0, 0, 623, 624, 5, 93, 0, 0, 624, 629, 3, 130, 65, 0, 625, 626, 5, 39, 0, 0, 626, 628, 3, 130, 65, 0, 627, 625, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 129, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 632, 633, 3, 16, 8, 0, 633, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 343, 351, 364, 368, 372, 379, 383, 390, 396, 403, 411, 419, 427, 444, 455, 466, 471, 475, 480, 491, 496, 500, 514, 525, 539, 550, 553, 558, 580, 588, 591, 596, 609, 612, 621, 629] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e36184b1f07da..e864eaff3edd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,26 +8,14 @@ * 2.0. 
*/ -import org.antlr.v4.runtime.FailedPredicateException; -import org.antlr.v4.runtime.NoViableAltException; -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ParserATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.tree.ParseTreeListener; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.TerminalNode; - +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) public class EsqlBaseParser extends ParserConfig { @@ -37,113 +25,120 @@ public class EsqlBaseParser extends ParserConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, - INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, - CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, - LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, - RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, - PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, - SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, - LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, - LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, - METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, - CLOSING_METRICS_WS=119; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, 
DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, + OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, + LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, + NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, + UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, + EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, + METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, + FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, + PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, + RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, + ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, + ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, + MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, + SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110, + SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, + LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, + LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, + JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, + METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, + CLOSING_METRICS_WS=128; public static final int - RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, - RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, - RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, - RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, - RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, - RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, - RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, - RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, - RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, - RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, - RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, - RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, - RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, - RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, - RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, - RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, - RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, - 
RULE_inlinestatsCommand = 61; + RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, + RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, + RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, + RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, + RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, + RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, + RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, + RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, + RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, + RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, + RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, + RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, + RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, + RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, + RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, + RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, + RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, + RULE_joinCondition = 64, RULE_joinPredicate = 65; private static String[] makeRuleNames() { return new String[] { - "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", - "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", - "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", - "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", - "inlinestatsCommand" + "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", + "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", + "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", + "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", + "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", 
"sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", + "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" }; } public static final String[] ruleNames = makeRuleNames(); private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", + "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, null, null, null, null, null, null, "'as'", null, null, null, + "'on'", "'with'", null, null, null, null, null, null, null, null, null, + null, "'info'", null, null, null, null, null, null, null, null, null, + null, null, null, null, "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", 
"RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -231,9 +226,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(124); + setState(132); query(0); - setState(125); + setState(133); match(EOF); } } @@ -255,7 +250,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -329,11 +324,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(128); + setState(136); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(135); + setState(143); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -344,16 +339,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(130); + setState(138); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(131); + setState(139); match(PIPE); - setState(132); + setState(140); processingCommand(); } - } + } } - setState(137); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -411,43 +406,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(144); + setState(152); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(138); + setState(146); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(139); + setState(147); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(140); + setState(148); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(141); + setState(149); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(142); + setState(150); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(143); + setState(151); metricsCommand(); } break; @@ -508,6 +503,9 @@ public InlinestatsCommandContext inlinestatsCommand() { public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); + } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -532,111 +530,120 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(162); + setState(172); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(154); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(155); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(156); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(157); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(158); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(151); + setState(159); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(152); + setState(160); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(153); + setState(161); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(154); + setState(162); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(155); + setState(163); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(156); + setState(164); enrichCommand(); } break; case 12: 
enterOuterAlt(_localctx, 12); { - setState(157); + setState(165); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(158); + setState(166); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(159); + setState(167); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(160); + setState(168); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(161); + setState(169); lookupCommand(); } break; + case 15: + enterOuterAlt(_localctx, 15); + { + setState(170); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(171); + joinCommand(); + } + break; } } catch (RecognitionException re) { @@ -682,9 +689,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(164); + setState(174); match(WHERE); - setState(165); + setState(175); booleanExpression(0); } } @@ -706,7 +713,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -900,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(197); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -909,9 +916,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(178); match(NOT); - setState(169); + setState(179); booleanExpression(8); } break; @@ -920,7 +927,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(170); + setState(180); valueExpression(); } break; @@ -929,7 +936,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(171); + setState(181); regexBooleanExpression(); } break; @@ -938,41 +945,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(172); + setState(182); valueExpression(); - setState(174); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(173); + setState(183); match(NOT); } } - setState(176); + setState(186); match(IN); - setState(177); + setState(187); match(LP); - setState(178); + setState(188); valueExpression(); - setState(183); + setState(193); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(179); + setState(189); match(COMMA); - setState(180); + setState(190); valueExpression(); } } - setState(185); + setState(195); _errHandler.sync(this); _la = _input.LA(1); } - setState(186); + setState(196); match(RP); } break; @@ -981,21 +988,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(188); + setState(198); valueExpression(); - setState(189); + setState(199); 
match(IS); - setState(191); + setState(201); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(190); + setState(200); match(NOT); } } - setState(193); + setState(203); match(NULL); } break; @@ -1004,15 +1011,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(195); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(196); + setState(205); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(207); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1020,7 +1025,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(205); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1028,11 +1033,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(199); + setState(208); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(200); + setState(209); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(201); + setState(210); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1041,18 +1046,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(202); + setState(211); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(203); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(204); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } - } + } } - setState(209); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1107,48 +1112,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(224); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(210); + setState(219); valueExpression(); - setState(212); + setState(221); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(211); + setState(220); match(NOT); } } - setState(214); + setState(223); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(215); + setState(224); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(217); + setState(226); valueExpression(); - setState(219); + setState(228); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(218); + setState(227); match(NOT); } } - setState(221); + setState(230); 
((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(222); + setState(231); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1202,11 +1207,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(226); + setState(235); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(227); + setState(236); match(COLON); - setState(228); + setState(237); ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } @@ -1228,7 +1233,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } public void copyFrom(ValueExpressionContext ctx) { @@ -1290,14 +1295,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(235); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(230); + setState(239); operatorExpression(0); } break; @@ -1305,11 +1310,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(231); + setState(240); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(232); + setState(241); comparisonOperator(); - setState(233); + setState(242); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1333,7 +1338,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void copyFrom(OperatorExpressionContext ctx) { @@ -1434,7 +1439,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(241); + setState(250); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1443,7 +1448,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(238); + setState(247); primaryExpression(0); } break; @@ -1452,7 +1457,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(239); + setState(248); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1463,13 +1468,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(240); + setState(249); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(251); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1477,7 +1482,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws 
RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(249); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1485,12 +1490,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(243); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(244); + setState(253); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { + if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1498,7 +1503,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(245); + setState(254); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1507,9 +1512,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(246); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(247); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1520,14 +1525,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(248); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } - } + } } - setState(253); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1551,7 +1556,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1685,7 +1690,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(262); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1694,7 +1699,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(255); + setState(264); constant(); } break; @@ -1703,7 +1708,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(256); + setState(265); qualifiedName(); } break; @@ -1712,7 +1717,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(257); + 
setState(266); functionExpression(); } break; @@ -1721,17 +1726,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(258); + setState(267); match(LP); - setState(259); + setState(268); booleanExpression(0); - setState(260); + setState(269); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(269); + setState(278); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1742,16 +1747,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(264); + setState(273); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(265); + setState(274); match(CAST_OP); - setState(266); + setState(275); dataType(); } - } + } } - setState(271); + setState(280); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1813,37 +1818,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(272); + setState(281); functionName(); - setState(273); + setState(282); match(LP); - setState(283); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(274); + setState(283); match(ASTERISK); } break; case 2: { { - setState(275); + setState(284); booleanExpression(0); - setState(280); + setState(289); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(276); + setState(285); match(COMMA); - setState(277); + setState(286); booleanExpression(0); } } - setState(282); + setState(291); _errHandler.sync(this); _la = _input.LA(1); } @@ -1851,7 +1856,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(285); + setState(294); match(RP); } } @@ -1897,7 +1902,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(287); + setState(296); identifierOrParameter(); } } @@ -1919,7 +1924,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1955,7 +1960,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(289); + setState(298); identifier(); } } @@ -2002,9 +2007,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(291); + setState(300); match(ROW); - setState(292); + setState(301); fields(); } } @@ -2058,23 +2063,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(294); + setState(303); field(); - setState(299); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(295); + 
setState(304); match(COMMA); - setState(296); + setState(305); field(); } - } + } } - setState(301); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2126,19 +2131,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(305); + setState(314); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(302); + setState(311); qualifiedName(); - setState(303); + setState(312); match(ASSIGN); } break; } - setState(307); + setState(316); booleanExpression(0); } } @@ -2196,34 +2201,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(309); + setState(318); match(FROM); - setState(310); + setState(319); indexPattern(); - setState(315); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(311); + setState(320); match(COMMA); - setState(312); + setState(321); indexPattern(); } - } + } } - setState(317); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(319); + setState(328); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(318); + setState(327); metadata(); } break; @@ -2276,19 +2281,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(333); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(321); + setState(330); clusterString(); - setState(322); + setState(331); match(COLON); } break; } - setState(326); + setState(335); indexString(); } } @@ -2332,7 +2337,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(337); match(UNQUOTED_SOURCE); } } @@ -2378,7 +2383,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(339); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2433,20 +2438,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(334); + setState(343); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(332); + setState(341); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(333); + setState(342); deprecated_metadata(); } break; @@ -2503,25 +2508,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(336); + setState(345); match(METADATA); - setState(337); + setState(346); match(UNQUOTED_SOURCE); - setState(342); + setState(351); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(338); + setState(347); match(COMMA); - setState(339); + setState(348); match(UNQUOTED_SOURCE); } - } + } } - setState(344); + setState(353); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2570,11 +2575,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(345); + setState(354); match(OPENING_BRACKET); - setState(346); + setState(355); metadataOption(); - setState(347); + setState(356); match(CLOSING_BRACKET); } } @@ -2638,46 +2643,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(349); + setState(358); match(DEV_METRICS); - setState(350); + setState(359); indexPattern(); - setState(355); + setState(364); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(351); + setState(360); match(COMMA); - setState(352); + setState(361); indexPattern(); } - } + } } - setState(357); + setState(366); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(359); + setState(368); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(358); + setState(367); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(363); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(361); + setState(370); match(BY); - setState(362); + setState(371); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2727,9 +2732,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(365); + setState(374); match(EVAL); - setState(366); + setState(375); fields(); } } @@ -2782,26 +2787,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(368); + setState(377); match(STATS); - setState(370); + setState(379); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(369); + setState(378); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(374); + setState(383); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(372); + setState(381); match(BY); - setState(373); + setState(382); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2858,23 +2863,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(385); aggField(); - setState(381); + setState(390); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(377); + setState(386); match(COMMA); - setState(378); + setState(387); aggField(); } - } + } } - setState(383); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2926,16 +2931,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(384); + setState(393); field(); - setState(387); + setState(396); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(385); + setState(394); match(WHERE); - setState(386); + setState(395); booleanExpression(0); } break; @@ 
-2992,23 +2997,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(389); + setState(398); identifierOrParameter(); - setState(394); + setState(403); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(390); + setState(399); match(DOT); - setState(391); + setState(400); identifierOrParameter(); } - } + } } - setState(396); + setState(405); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3064,23 +3069,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(397); + setState(406); identifierPattern(); - setState(402); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(398); + setState(407); match(DOT); - setState(399); + setState(408); identifierPattern(); } - } + } } - setState(404); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3136,23 +3141,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(405); + setState(414); qualifiedNamePattern(); - setState(410); + setState(419); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(406); + setState(415); match(COMMA); - setState(407); + setState(416); qualifiedNamePattern(); } - } + } } - setState(412); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3200,7 +3205,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(413); + setState(422); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3253,22 +3258,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(418); + setState(427); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(415); + setState(424); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(416); + setState(425); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(417); + setState(426); parameter(); } break; @@ -3292,7 +3297,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + @SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3541,14 +3546,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(462); + setState(471); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); 
enterOuterAlt(_localctx, 1); { - setState(420); + setState(429); match(NULL); } break; @@ -3556,9 +3561,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(421); + setState(430); integerValue(); - setState(422); + setState(431); match(UNQUOTED_IDENTIFIER); } break; @@ -3566,7 +3571,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(424); + setState(433); decimalValue(); } break; @@ -3574,7 +3579,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(425); + setState(434); integerValue(); } break; @@ -3582,7 +3587,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(426); + setState(435); booleanValue(); } break; @@ -3590,7 +3595,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(427); + setState(436); parameter(); } break; @@ -3598,7 +3603,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(428); + setState(437); string(); } break; @@ -3606,27 +3611,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(429); + setState(438); match(OPENING_BRACKET); - setState(430); + setState(439); numericValue(); - setState(435); + setState(444); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(431); + setState(440); match(COMMA); - setState(432); + setState(441); numericValue(); } } - setState(437); + setState(446); _errHandler.sync(this); _la = _input.LA(1); } - setState(438); + setState(447); match(CLOSING_BRACKET); } break; @@ -3634,27 +3639,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(440); + setState(449); match(OPENING_BRACKET); - setState(441); + setState(450); booleanValue(); - setState(446); + setState(455); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(442); + setState(451); match(COMMA); - setState(443); + setState(452); booleanValue(); } } - setState(448); + setState(457); _errHandler.sync(this); _la = _input.LA(1); } - setState(449); + setState(458); match(CLOSING_BRACKET); } break; @@ -3662,27 +3667,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(451); + setState(460); match(OPENING_BRACKET); - setState(452); + setState(461); string(); - setState(457); + setState(466); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(453); + setState(462); match(COMMA); - setState(454); + setState(463); string(); } } - setState(459); + setState(468); _errHandler.sync(this); _la = _input.LA(1); } - setState(460); + setState(469); match(CLOSING_BRACKET); } break; @@ -3706,7 +3711,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, 
invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3756,14 +3761,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(466); + setState(475); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(464); + setState(473); match(PARAM); } break; @@ -3771,7 +3776,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(465); + setState(474); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3822,22 +3827,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(471); + setState(480); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(468); + setState(477); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(469); + setState(478); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(470); + setState(479); parameter(); } break; @@ -3884,9 +3889,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(482); match(LIMIT); - setState(474); + setState(483); match(INTEGER_LITERAL); } } @@ -3941,25 +3946,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(485); match(SORT); - setState(477); + setState(486); orderExpression(); - setState(482); + setState(491); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(478); + setState(487); match(COMMA); - setState(479); + setState(488); orderExpression(); } - } + } } - setState(484); + setState(493); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4015,14 +4020,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(494); booleanExpression(0); - setState(487); + setState(496); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(486); + setState(495); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4036,14 +4041,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(491); + setState(500); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(489); + setState(498); match(NULLS); - setState(490); + setState(499); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4102,9 +4107,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try 
{ enterOuterAlt(_localctx, 1); { - setState(493); + setState(502); match(KEEP); - setState(494); + setState(503); qualifiedNamePatterns(); } } @@ -4151,9 +4156,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(505); match(DROP); - setState(497); + setState(506); qualifiedNamePatterns(); } } @@ -4208,25 +4213,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(499); + setState(508); match(RENAME); - setState(500); + setState(509); renameClause(); - setState(505); + setState(514); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(501); + setState(510); match(COMMA); - setState(502); + setState(511); renameClause(); } - } + } } - setState(507); + setState(516); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -4280,11 +4285,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(517); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(509); + setState(518); match(AS); - setState(510); + setState(519); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4337,18 +4342,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(512); + setState(521); match(DISSECT); - setState(513); + setState(522); primaryExpression(0); - setState(514); + setState(523); string(); - setState(516); + setState(525); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(515); + setState(524); commandOptions(); } break; @@ -4401,11 +4406,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(527); match(GROK); - setState(519); + setState(528); primaryExpression(0); - setState(520); + setState(529); string(); } } @@ -4452,9 +4457,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(531); match(MV_EXPAND); - setState(523); + setState(532); qualifiedName(); } } @@ -4508,23 +4513,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(525); + setState(534); commandOption(); - setState(530); + setState(539); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(535); match(COMMA); - setState(527); + setState(536); commandOption(); } - } + } } - setState(532); + setState(541); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4576,11 +4581,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(542); identifier(); - setState(534); + setState(543); match(ASSIGN); - setState(535); + setState(544); constant(); } } @@ -4626,7 +4631,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - 
setState(537); + setState(546); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4681,20 +4686,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(541); + setState(550); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(539); + setState(548); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(540); + setState(549); integerValue(); } break; @@ -4743,12 +4748,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(553); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(543); + setState(552); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4761,7 +4766,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(546); + setState(555); match(DECIMAL_LITERAL); } } @@ -4808,12 +4813,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(549); + setState(558); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(548); + setState(557); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4826,7 +4831,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(551); + setState(560); match(INTEGER_LITERAL); } } @@ -4870,7 +4875,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(562); match(QUOTED_STRING); } } @@ -4920,9 +4925,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(555); + setState(564); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4975,9 +4980,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(566); match(EXPLAIN); - setState(558); + setState(567); subqueryExpression(); } } @@ -5025,11 +5030,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(560); + setState(569); match(OPENING_BRACKET); - setState(561); + setState(570); query(0); - setState(562); + setState(571); match(CLOSING_BRACKET); } } @@ -5051,7 +5056,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -5086,9 +5091,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(564); + setState(573); match(SHOW); - setState(565); + setState(574); match(INFO); } } @@ -5151,46 +5156,46 @@ public final EnrichCommandContext enrichCommand() throws 
RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(567); + setState(576); match(ENRICH); - setState(568); + setState(577); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(571); + setState(580); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(569); + setState(578); match(ON); - setState(570); + setState(579); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(582); + setState(591); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(573); + setState(582); match(WITH); - setState(574); + setState(583); enrichWithClause(); - setState(579); + setState(588); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(575); + setState(584); match(COMMA); - setState(576); + setState(585); enrichWithClause(); } - } + } } - setState(581); + setState(590); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -5247,19 +5252,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(587); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(584); + setState(593); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(585); + setState(594); match(ASSIGN); } break; } - setState(589); + setState(598); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5312,13 +5317,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(591); + setState(600); match(DEV_LOOKUP); - setState(592); + setState(601); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(593); + setState(602); match(ON); - setState(594); + setState(603); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5371,18 +5376,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(596); + setState(605); match(DEV_INLINESTATS); - setState(597); + setState(606); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(600); + setState(609); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(598); + setState(607); match(BY); - setState(599); + setState(608); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5400,6 +5405,270 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class JoinCommandContext extends ParserRuleContext { + public Token type; + public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); } + public JoinTargetContext joinTarget() { + return getRuleContext(JoinTargetContext.class,0); + } + public JoinConditionContext joinCondition() { + return getRuleContext(JoinConditionContext.class,0); + } + public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); } + public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); } + public TerminalNode DEV_JOIN_RIGHT() { return 
getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); } + @SuppressWarnings("this-escape") + public JoinCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinCommand(this); + else return visitor.visitChildren(this); + } + } + + public final JoinCommandContext joinCommand() throws RecognitionException { + JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState()); + enterRule(_localctx, 124, RULE_joinCommand); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(612); + _errHandler.sync(this); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { + { + setState(611); + ((JoinCommandContext)_localctx).type = _input.LT(1); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { + ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + + setState(614); + match(DEV_JOIN); + setState(615); + joinTarget(); + setState(616); + joinCondition(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinTargetContext extends ParserRuleContext { + public IdentifierContext index; + public IdentifierContext alias; + public List identifier() { + return getRuleContexts(IdentifierContext.class); + } + public IdentifierContext identifier(int i) { + return getRuleContext(IdentifierContext.class,i); + } + public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } + @SuppressWarnings("this-escape") + public JoinTargetContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinTarget; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinTarget(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinTarget(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinTarget(this); + else return visitor.visitChildren(this); + } + } + + public final JoinTargetContext joinTarget() throws RecognitionException { + JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); + enterRule(_localctx, 126, RULE_joinTarget); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(618); + ((JoinTargetContext)_localctx).index = identifier(); + setState(621); + _errHandler.sync(this); + _la = _input.LA(1); + if (_la==AS) { + { + 
setState(619); + match(AS); + setState(620); + ((JoinTargetContext)_localctx).alias = identifier(); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinConditionContext extends ParserRuleContext { + public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } + public List joinPredicate() { + return getRuleContexts(JoinPredicateContext.class); + } + public JoinPredicateContext joinPredicate(int i) { + return getRuleContext(JoinPredicateContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + @SuppressWarnings("this-escape") + public JoinConditionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinCondition; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCondition(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCondition(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinCondition(this); + else return visitor.visitChildren(this); + } + } + + public final JoinConditionContext joinCondition() throws RecognitionException { + JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState()); + enterRule(_localctx, 128, RULE_joinCondition); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(623); + match(ON); + setState(624); + joinPredicate(); + setState(629); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(625); + match(COMMA); + setState(626); + joinPredicate(); + } + } + } + setState(631); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinPredicateContext extends ParserRuleContext { + public ValueExpressionContext valueExpression() { + return getRuleContext(ValueExpressionContext.class,0); + } + @SuppressWarnings("this-escape") + public JoinPredicateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinPredicate; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinPredicate(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinPredicate(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return 
((EsqlBaseParserVisitor)visitor).visitJoinPredicate(this); + else return visitor.visitChildren(this); + } + } + + public final JoinPredicateContext joinPredicate() throws RecognitionException { + JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState()); + enterRule(_localctx, 130, RULE_joinPredicate); + try { + enterOuterAlt(_localctx, 1); + { + setState(632); + valueExpression(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -5441,13 +5710,13 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in return this.isDevVersion(); case 3: return this.isDevVersion(); + case 4: + return this.isDevVersion(); } return true; } private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 4: - return this.isDevVersion(); case 5: return precpred(_ctx, 5); case 6: @@ -5487,387 +5756,406 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca } public static final String _serializedATN = - "\u0004\u0001w\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ - "\u0086\b\u0001\n\u0001\f\u0001\u0089\t\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0091\b\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a3\b\u0003\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00b6"+ - "\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00c0\b\u0005\u0001\u0005\u0001\u0005"+ - 
"\u0001\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ce\b\u0005"+ - "\n\u0005\f\u0005\u00d1\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00d5"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00dc\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ - "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0003\b\u00ec\b\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0003\t\u00f2\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00fa\b\t\n\t\f\t\u00fd\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0003\n\u0107\b\n\u0001\n\u0001\n\u0001\n\u0005"+ - "\n\u010c\b\n\n\n\f\n\u010f\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0117\b\u000b\n\u000b\f\u000b"+ - "\u011a\t\u000b\u0003\u000b\u011c\b\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0005\u000f\u012a\b\u000f\n\u000f\f\u000f\u012d"+ - "\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132\b\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0005\u0011\u013a\b\u0011\n\u0011\f\u0011\u013d\t\u0011\u0001\u0011\u0003"+ - "\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0145"+ - "\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u014f\b\u0015\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0155\b\u0016\n\u0016\f\u0016"+ - "\u0158\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0162\b\u0018\n\u0018"+ - "\f\u0018\u0165\t\u0018\u0001\u0018\u0003\u0018\u0168\b\u0018\u0001\u0018"+ - "\u0001\u0018\u0003\u0018\u016c\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0003\u001a\u0173\b\u001a\u0001\u001a\u0001\u001a"+ - "\u0003\u001a\u0177\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ - "\u017c\b\u001b\n\u001b\f\u001b\u017f\t\u001b\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0003\u001c\u0184\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005"+ - "\u001d\u0189\b\u001d\n\u001d\f\u001d\u018c\t\u001d\u0001\u001e\u0001\u001e"+ - "\u0001\u001e\u0005\u001e\u0191\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0199\b\u001f\n\u001f\f\u001f"+ - "\u019c\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01a3\b!\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b2\b\"\n\"\f\"\u01b5\t\"\u0001\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bd\b\"\n\"\f\"\u01c0"+ - "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c8\b\""+ - "\n\"\f\"\u01cb\t\"\u0001\"\u0001\"\u0003\"\u01cf\b\"\u0001#\u0001#\u0003"+ - "#\u01d3\b#\u0001$\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001%\u0001"+ - "&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001\'"+ - "\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ - "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ - "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ - ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - 
"/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ - "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ - "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ - ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ - "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ - ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ - "\u0000;<\u0001\u0000=?\u0002\u0000\u001a\u001aLL\u0001\u0000CD\u0002\u0000"+ - "\u001f\u001f##\u0002\u0000&&))\u0002\u0000%%33\u0002\u0000446:\u0274\u0000"+ - "|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090"+ - "\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001"+ - "\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000"+ - "\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000"+ - "\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000"+ - "\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u011f\u0001\u0000"+ - "\u0000\u0000\u001a\u0121\u0001\u0000\u0000\u0000\u001c\u0123\u0001\u0000"+ - "\u0000\u0000\u001e\u0126\u0001\u0000\u0000\u0000 \u0131\u0001\u0000\u0000"+ - "\u0000\"\u0135\u0001\u0000\u0000\u0000$\u0144\u0001\u0000\u0000\u0000"+ - "&\u0148\u0001\u0000\u0000\u0000(\u014a\u0001\u0000\u0000\u0000*\u014e"+ - "\u0001\u0000\u0000\u0000,\u0150\u0001\u0000\u0000\u0000.\u0159\u0001\u0000"+ - "\u0000\u00000\u015d\u0001\u0000\u0000\u00002\u016d\u0001\u0000\u0000\u0000"+ - "4\u0170\u0001\u0000\u0000\u00006\u0178\u0001\u0000\u0000\u00008\u0180"+ - "\u0001\u0000\u0000\u0000:\u0185\u0001\u0000\u0000\u0000<\u018d\u0001\u0000"+ - "\u0000\u0000>\u0195\u0001\u0000\u0000\u0000@\u019d\u0001\u0000\u0000\u0000"+ - "B\u01a2\u0001\u0000\u0000\u0000D\u01ce\u0001\u0000\u0000\u0000F\u01d2"+ - "\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001\u0000"+ - "\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000\u0000"+ - "P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T\u01f3"+ - "\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001\u0000"+ - "\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000\u0000"+ - "\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000b"+ - "\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220\u0001"+ - "\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000\u0000"+ - "\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000p"+ - "\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237\u0001"+ - "\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000\u0000"+ - "\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005"+ - "\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001"+ - "\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001"+ - "\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0019"+ - "\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000"+ - "\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000"+ - 
"\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000"+ - "\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000"+ - "\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d"+ - "\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u0003"+ - "0\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000"+ - "\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000"+ - "\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000"+ - "\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004"+ - "\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3"+ - "\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000"+ - "\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003"+ - "Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f"+ - "\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003"+ - "\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000"+ - "\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000"+ - "\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000"+ - "\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000"+ - "\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000"+ - "\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000"+ - "\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000"+ - "\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000"+ - "\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6"+ - "\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8"+ - "\u00a9\u0005,\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003"+ - "\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010"+ - "\b\u0000\u00ad\u00af\u0005,\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000"+ - "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000"+ - "\u0000\u00b0\u00b1\u0005\'\u0000\u0000\u00b1\u00b2\u0005+\u0000\u0000"+ - "\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005\"\u0000\u0000\u00b4"+ - "\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9"+ - "\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7"+ - "\u0001\u0000\u0000\u0000\u00ba\u00bb\u00052\u0000\u0000\u00bb\u00c6\u0001"+ - "\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005("+ - "\u0000\u0000\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000"+ - "\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000"+ - "\u0000\u00c1\u00c2\u0005-\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000"+ - "\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000"+ - "\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000"+ - "\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000"+ - "\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8"+ - "\u00c9\u0005\u001e\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb"+ - "\n\u0004\u0000\u0000\u00cb\u00cc\u0005/\u0000\u0000\u00cc\u00ce\u0003"+ - "\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000"+ - 
"\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010"+ - "\b\u0000\u00d3\u00d5\u0005,\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000"+ - "\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0005*\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8"+ - "\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc"+ - "\u0005,\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001"+ - "\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u0005"+ - "1\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000"+ - "\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000"+ - "\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003:\u001d\u0000"+ - "\u00e3\u00e4\u0005\u0018\u0000\u0000\u00e4\u00e5\u0003D\"\u0000\u00e5"+ - "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ - "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ - "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ - "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ - "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ - "\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ - "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ - "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ - "\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ - "\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ - "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ - "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ - "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005+\u0000\u0000\u0103"+ - "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00052\u0000\u0000\u0105\u0107"+ - "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ - "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ - "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ - "\n\u0001\u0000\u0000\u0109\u010a\u0005!\u0000\u0000\u010a\u010c\u0003"+ - "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ - "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ - "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ - "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005+\u0000"+ - "\u0000\u0112\u011c\u0005=\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ - "\u0114\u0115\u0005\"\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ - "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ - "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ - "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ - "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ - 
"\u011e\u00052\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0120"+ - "\u0003H$\u0000\u0120\u0019\u0001\u0000\u0000\u0000\u0121\u0122\u0003@"+ - " \u0000\u0122\u001b\u0001\u0000\u0000\u0000\u0123\u0124\u0005\f\u0000"+ - "\u0000\u0124\u0125\u0003\u001e\u000f\u0000\u0125\u001d\u0001\u0000\u0000"+ - "\u0000\u0126\u012b\u0003 \u0010\u0000\u0127\u0128\u0005\"\u0000\u0000"+ - "\u0128\u012a\u0003 \u0010\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u012a"+ - "\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012b"+ - "\u012c\u0001\u0000\u0000\u0000\u012c\u001f\u0001\u0000\u0000\u0000\u012d"+ - "\u012b\u0001\u0000\u0000\u0000\u012e\u012f\u0003:\u001d\u0000\u012f\u0130"+ - "\u0005 \u0000\u0000\u0130\u0132\u0001\u0000\u0000\u0000\u0131\u012e\u0001"+ - "\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132\u0133\u0001"+ - "\u0000\u0000\u0000\u0133\u0134\u0003\n\u0005\u0000\u0134!\u0001\u0000"+ - "\u0000\u0000\u0135\u0136\u0005\u0006\u0000\u0000\u0136\u013b\u0003$\u0012"+ - "\u0000\u0137\u0138\u0005\"\u0000\u0000\u0138\u013a\u0003$\u0012\u0000"+ - "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ - "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ - "\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u0140\u0003*\u0015\u0000\u013f\u013e\u0001\u0000\u0000\u0000\u013f"+ - "\u0140\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141\u0142"+ - "\u0003&\u0013\u0000\u0142\u0143\u0005\u0018\u0000\u0000\u0143\u0145\u0001"+ - "\u0000\u0000\u0000\u0144\u0141\u0001\u0000\u0000\u0000\u0144\u0145\u0001"+ - "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0147\u0003"+ - "(\u0014\u0000\u0147%\u0001\u0000\u0000\u0000\u0148\u0149\u0005L\u0000"+ - "\u0000\u0149\'\u0001\u0000\u0000\u0000\u014a\u014b\u0007\u0002\u0000\u0000"+ - "\u014b)\u0001\u0000\u0000\u0000\u014c\u014f\u0003,\u0016\u0000\u014d\u014f"+ - "\u0003.\u0017\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014d\u0001"+ - "\u0000\u0000\u0000\u014f+\u0001\u0000\u0000\u0000\u0150\u0151\u0005K\u0000"+ - "\u0000\u0151\u0156\u0005L\u0000\u0000\u0152\u0153\u0005\"\u0000\u0000"+ - "\u0153\u0155\u0005L\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ - "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ - "\u0157\u0001\u0000\u0000\u0000\u0157-\u0001\u0000\u0000\u0000\u0158\u0156"+ - "\u0001\u0000\u0000\u0000\u0159\u015a\u0005A\u0000\u0000\u015a\u015b\u0003"+ - ",\u0016\u0000\u015b\u015c\u0005B\u0000\u0000\u015c/\u0001\u0000\u0000"+ - "\u0000\u015d\u015e\u0005\u0013\u0000\u0000\u015e\u0163\u0003$\u0012\u0000"+ - "\u015f\u0160\u0005\"\u0000\u0000\u0160\u0162\u0003$\u0012\u0000\u0161"+ - "\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163"+ - "\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164"+ - "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166"+ - "\u0168\u00036\u001b\u0000\u0167\u0166\u0001\u0000\u0000\u0000\u0167\u0168"+ - "\u0001\u0000\u0000\u0000\u0168\u016b\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0005\u001d\u0000\u0000\u016a\u016c\u0003\u001e\u000f\u0000\u016b\u0169"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000\u016c1\u0001"+ - "\u0000\u0000\u0000\u016d\u016e\u0005\u0004\u0000\u0000\u016e\u016f\u0003"+ - "\u001e\u000f\u0000\u016f3\u0001\u0000\u0000\u0000\u0170\u0172\u0005\u000f"+ - "\u0000\u0000\u0171\u0173\u00036\u001b\u0000\u0172\u0171\u0001\u0000\u0000"+ - 
"\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176\u0001\u0000\u0000"+ - "\u0000\u0174\u0175\u0005\u001d\u0000\u0000\u0175\u0177\u0003\u001e\u000f"+ - "\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000\u0000"+ - "\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u017d\u00038\u001c\u0000\u0179"+ - "\u017a\u0005\"\u0000\u0000\u017a\u017c\u00038\u001c\u0000\u017b\u0179"+ - "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017b"+ - "\u0001\u0000\u0000\u0000\u017d\u017e\u0001\u0000\u0000\u0000\u017e7\u0001"+ - "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180\u0183\u0003"+ - " \u0010\u0000\u0181\u0182\u0005\u0010\u0000\u0000\u0182\u0184\u0003\n"+ - "\u0005\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ - "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u018a\u0003H$\u0000"+ - "\u0186\u0187\u0005$\u0000\u0000\u0187\u0189\u0003H$\u0000\u0188\u0186"+ - "\u0001\u0000\u0000\u0000\u0189\u018c\u0001\u0000\u0000\u0000\u018a\u0188"+ - "\u0001\u0000\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001"+ - "\u0000\u0000\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018d\u0192\u0003"+ - "B!\u0000\u018e\u018f\u0005$\u0000\u0000\u018f\u0191\u0003B!\u0000\u0190"+ - "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ - "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ - "=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u019a"+ - "\u0003<\u001e\u0000\u0196\u0197\u0005\"\u0000\u0000\u0197\u0199\u0003"+ - "<\u001e\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000"+ - "\u0000\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000"+ - "\u0000\u0000\u019b?\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ - "\u0000\u019d\u019e\u0007\u0003\u0000\u0000\u019eA\u0001\u0000\u0000\u0000"+ - "\u019f\u01a3\u0005P\u0000\u0000\u01a0\u01a1\u0004!\n\u0000\u01a1\u01a3"+ - "\u0003F#\u0000\u01a2\u019f\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a3C\u0001\u0000\u0000\u0000\u01a4\u01cf\u0005-\u0000\u0000"+ - "\u01a5\u01a6\u0003h4\u0000\u01a6\u01a7\u0005C\u0000\u0000\u01a7\u01cf"+ - "\u0001\u0000\u0000\u0000\u01a8\u01cf\u0003f3\u0000\u01a9\u01cf\u0003h"+ - "4\u0000\u01aa\u01cf\u0003b1\u0000\u01ab\u01cf\u0003F#\u0000\u01ac\u01cf"+ - "\u0003j5\u0000\u01ad\u01ae\u0005A\u0000\u0000\u01ae\u01b3\u0003d2\u0000"+ - "\u01af\u01b0\u0005\"\u0000\u0000\u01b0\u01b2\u0003d2\u0000\u01b1\u01af"+ - "\u0001\u0000\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1"+ - "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4\u01b6"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7"+ - "\u0005B\u0000\u0000\u01b7\u01cf\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005"+ - "A\u0000\u0000\u01b9\u01be\u0003b1\u0000\u01ba\u01bb\u0005\"\u0000\u0000"+ - "\u01bb\u01bd\u0003b1\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0"+ - "\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf"+ - "\u0001\u0000\u0000\u0000\u01bf\u01c1\u0001\u0000\u0000\u0000\u01c0\u01be"+ - "\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005B\u0000\u0000\u01c2\u01cf\u0001"+ - "\u0000\u0000\u0000\u01c3\u01c4\u0005A\u0000\u0000\u01c4\u01c9\u0003j5"+ - "\u0000\u01c5\u01c6\u0005\"\u0000\u0000\u01c6\u01c8\u0003j5\u0000\u01c7"+ - "\u01c5\u0001\u0000\u0000\u0000\u01c8\u01cb\u0001\u0000\u0000\u0000\u01c9"+ - "\u01c7\u0001\u0000\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca"+ - 
"\u01cc\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0005B\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01a4"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a5\u0001\u0000\u0000\u0000\u01ce\u01a8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a9\u0001\u0000\u0000\u0000\u01ce\u01aa"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ab\u0001\u0000\u0000\u0000\u01ce\u01ac"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ad\u0001\u0000\u0000\u0000\u01ce\u01b8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01c3\u0001\u0000\u0000\u0000\u01cfE\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d3\u00050\u0000\u0000\u01d1\u01d3\u0005@\u0000"+ - "\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000"+ - "\u0000\u01d3G\u0001\u0000\u0000\u0000\u01d4\u01d8\u0003@ \u0000\u01d5"+ - "\u01d6\u0004$\u000b\u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d4\u0001"+ - "\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000"+ - "\u0000\u0000\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001b\u0000"+ - "\u0000\u01dbK\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000"+ - "\u01dd\u01e2\u0003N\'\u0000\u01de\u01df\u0005\"\u0000\u0000\u01df\u01e1"+ - "\u0003N\'\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001"+ - "\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001"+ - "\u0000\u0000\u0000\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000"+ - "\u0000\u0000\u01e5\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000"+ - "\u0000\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000"+ - "\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005.\u0000\u0000"+ - "\u01ea\u01ec\u0007\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0005\b\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001"+ - "\u0000\u0000\u0000\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003"+ - ">\u001f\u0000\u01f2S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000"+ - "\u0000\u01f4\u01f9\u0003V+\u0000\u01f5\u01f6\u0005\"\u0000\u0000\u01f6"+ - "\u01f8\u0003V+\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001"+ - "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001"+ - "\u0000\u0000\u0000\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000"+ - "\u01fe\u01ff\u0003<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201"+ - "\u0005\u0001\u0000\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003"+ - "j5\u0000\u0203\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0001\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206"+ - "\u0207\u0005\u0007\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209"+ - "\u0003j5\u0000\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000"+ - "\u0000\u020b\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d"+ - "\u0212\u0003`0\u0000\u020e\u020f\u0005\"\u0000\u0000\u020f\u0211\u0003"+ - "`0\u0000\u0210\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000"+ - "\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000"+ - "\u0000\u0213_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+ - "\u0215\u0216\u0003@ \u0000\u0216\u0217\u0005 \u0000\u0000\u0217\u0218"+ - "\u0003D\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006"+ - "\u0000\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000"+ - 
"\u021c\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c"+ - "\u0001\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007"+ - "\u0000\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001"+ - "\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005"+ - "\u001c\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000"+ - "\u0000\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001b"+ - "\u0000\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a\u0000"+ - "\u0000\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000"+ - "\u022cm\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e"+ - "\u022f\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005"+ - "A\u0000\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000"+ - "\u0000\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000"+ - "\u0235\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238"+ - "\u0005\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005"+ - "X\u0000\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000"+ - "\u0000\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f"+ - "\u0240\u0005\"\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001"+ - "\u0000\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ - "\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001"+ - "\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001"+ - "\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000"+ - "\u0000\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005 \u0000\u0000"+ - "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ - "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ - "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ - "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ - "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ - "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ - "\u0256\u0257\u0005\u001d\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ - "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ - "\u0259{\u0001\u0000\u0000\u0000:\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ - "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ - "\u011b\u012b\u0131\u013b\u013f\u0144\u014e\u0156\u0163\u0167\u016b\u0172"+ - "\u0176\u017d\u0183\u018a\u0192\u019a\u01a2\u01b3\u01be\u01c9\u01ce\u01d2"+ - "\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b\u0243"+ - "\u0246\u024b\u0258"; + "\u0004\u0001\u0080\u027b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + 
"\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ + "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ + "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ + "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f5\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u00fb\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u0103\b\t\n\t\f\t\u0106\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0110\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0120\b\u000b\n\u000b"+ + "\f\u000b\u0123\t\u000b\u0003\u000b\u0125\b\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0133\b\u000f\n\u000f\f\u000f"+ + "\u0136\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u013b\b"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0005\u0011\u0143\b\u0011\n\u0011\f\u0011\u0146\t\u0011\u0001\u0011"+ + "\u0003\u0011\u0149\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012"+ + "\u014e\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0158\b\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015e\b\u0016\n\u0016"+ + "\f\u0016\u0161\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016b\b\u0018"+ + 
"\n\u0018\f\u0018\u016e\t\u0018\u0001\u0018\u0003\u0018\u0171\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0175\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017c\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u0180\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0185\b\u001b\n\u001b\f\u001b\u0188\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u018d\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u0192\b\u001d\n\u001d\f\u001d\u0195\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u019a\b\u001e\n\u001e\f\u001e\u019d\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a2\b\u001f\n\u001f"+ + "\f\u001f\u01a5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01ac"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f\"\u01be\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c6\b\"\n\""+ + "\f\"\u01c9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01d1\b\"\n\"\f\"\u01d4\t\"\u0001\"\u0001\"\u0003\"\u01d8\b\"\u0001#"+ + "\u0001#\u0003#\u01dc\b#\u0001$\u0001$\u0001$\u0003$\u01e1\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01ea\b&\n&\f&\u01ed\t&\u0001"+ + "\'\u0001\'\u0003\'\u01f1\b\'\u0001\'\u0001\'\u0003\'\u01f5\b\'\u0001("+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ + "*\u0201\b*\n*\f*\u0204\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u020e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u021a\b/\n/\f/\u021d\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0227\b2\u00013\u00033\u022a"+ + "\b3\u00013\u00013\u00014\u00034\u022f\b4\u00014\u00014\u00015\u00015\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0245\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u024b\b:\n:\f:\u024e\t:\u0003:\u0250\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0255\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0262\b=\u0001>\u0003>\u0265\b>\u0001"+ + ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026e\b?\u0001@\u0001"+ + "@\u0001@\u0001@\u0005@\u0274\b@\n@\f@\u0277\t@\u0001A\u0001A\u0001A\u0000"+ + "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "BD\u0002\u0000\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000"+ + "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u0294\u0000"+ + "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ + "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ + "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ + "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f4\u0001"+ + "\u0000\u0000\u0000\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001"+ + "\u0000\u0000\u0000\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001"+ + "\u0000\u0000\u0000\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001"+ + "\u0000\u0000\u0000\u001e\u012f\u0001\u0000\u0000\u0000 \u013a\u0001\u0000"+ + "\u0000\u0000\"\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000"+ + 
"\u0000&\u0151\u0001\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*"+ + "\u0157\u0001\u0000\u0000\u0000,\u0159\u0001\u0000\u0000\u0000.\u0162\u0001"+ + "\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u00002\u0176\u0001\u0000\u0000"+ + "\u00004\u0179\u0001\u0000\u0000\u00006\u0181\u0001\u0000\u0000\u00008"+ + "\u0189\u0001\u0000\u0000\u0000:\u018e\u0001\u0000\u0000\u0000<\u0196\u0001"+ + "\u0000\u0000\u0000>\u019e\u0001\u0000\u0000\u0000@\u01a6\u0001\u0000\u0000"+ + "\u0000B\u01ab\u0001\u0000\u0000\u0000D\u01d7\u0001\u0000\u0000\u0000F"+ + "\u01db\u0001\u0000\u0000\u0000H\u01e0\u0001\u0000\u0000\u0000J\u01e2\u0001"+ + "\u0000\u0000\u0000L\u01e5\u0001\u0000\u0000\u0000N\u01ee\u0001\u0000\u0000"+ + "\u0000P\u01f6\u0001\u0000\u0000\u0000R\u01f9\u0001\u0000\u0000\u0000T"+ + "\u01fc\u0001\u0000\u0000\u0000V\u0205\u0001\u0000\u0000\u0000X\u0209\u0001"+ + "\u0000\u0000\u0000Z\u020f\u0001\u0000\u0000\u0000\\\u0213\u0001\u0000"+ + "\u0000\u0000^\u0216\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000"+ + "b\u0222\u0001\u0000\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u0229"+ + "\u0001\u0000\u0000\u0000h\u022e\u0001\u0000\u0000\u0000j\u0232\u0001\u0000"+ + "\u0000\u0000l\u0234\u0001\u0000\u0000\u0000n\u0236\u0001\u0000\u0000\u0000"+ + "p\u0239\u0001\u0000\u0000\u0000r\u023d\u0001\u0000\u0000\u0000t\u0240"+ + "\u0001\u0000\u0000\u0000v\u0254\u0001\u0000\u0000\u0000x\u0258\u0001\u0000"+ + "\u0000\u0000z\u025d\u0001\u0000\u0000\u0000|\u0264\u0001\u0000\u0000\u0000"+ + "~\u026a\u0001\u0000\u0000\u0000\u0080\u026f\u0001\u0000\u0000\u0000\u0082"+ + "\u0278\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ + "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ + "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ + "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ + "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ + "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ + "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ + "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ + "\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094\u0099\u0003"+ + "\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ + "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ + "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ + "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ + "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ + "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ + "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ + "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ + "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ + "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ + "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ + "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ + "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ + "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ + 
"\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ + "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ + "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ + "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ + "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ + "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ + "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ + ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ + "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ + "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ + "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ + "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ + "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ + "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ + "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ + "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ + "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ + "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ + "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ + "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ + "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ + "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ + "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ + "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ + "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ec\u0003:"+ + "\u001d\u0000\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003D\"\u0000"+ + "\u00ee\u000f\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0"+ + "\u00f1\u0003\u0012\t\u0000\u00f1\u00f2\u0003l6\u0000\u00f2\u00f3\u0003"+ + "\u0012\t\u0000\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000"+ + "\u0000\u0000\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000"+ + "\u0000\u0000\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014"+ + 
"\n\u0000\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t"+ + "\u0003\u00fa\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000"+ + "\u0000\u00fb\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000"+ + "\u00fd\u00fe\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff"+ + "\u0100\n\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103"+ + "\u0003\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001"+ + "\u0000\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001"+ + "\u0000\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001"+ + "\u0000\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006"+ + "\n\uffff\uffff\u0000\u0108\u0110\u0003D\"\u0000\u0109\u0110\u0003:\u001d"+ + "\u0000\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000"+ + "\u010c\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e"+ + "\u0110\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f"+ + "\u0109\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f"+ + "\u010b\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111"+ + "\u0112\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115"+ + "\u0003\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ + "\u0000\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050"+ + "\u0000\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005"+ + "\u0000\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000"+ + "\u011f\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000"+ + "\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000"+ + "\u0122\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000"+ + "\u0124\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128"+ + "\u0129\u0003H$\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ + "@ \u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f\u0000"+ + "\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000\u0000"+ + "\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000\u0000"+ + "\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133"+ + "\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000\u0134"+ + "\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000\u0136"+ + "\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u0003:\u001d\u0000\u0138\u0139"+ + "\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137\u0001"+ + "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c\u0001"+ + "\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001\u0000"+ + "\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003$\u0012"+ + "\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012\u0000"+ + "\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000"+ + "\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000"+ + "\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000"+ + "\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000\u0148"+ + 
"\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a\u014b"+ + "\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001\u0000"+ + "\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ + "\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003(\u0014"+ + "\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000\u0000\u0152"+ + "\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000\u0154)"+ + "\u0001\u0000\u0000\u0000\u0155\u0158\u0003,\u0016\u0000\u0156\u0158\u0003"+ + ".\u0017\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0157\u0156\u0001\u0000"+ + "\u0000\u0000\u0158+\u0001\u0000\u0000\u0000\u0159\u015a\u0005P\u0000\u0000"+ + "\u015a\u015f\u0005Q\u0000\u0000\u015b\u015c\u0005\'\u0000\u0000\u015c"+ + "\u015e\u0005Q\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u0161"+ + "\u0001\u0000\u0000\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u015f\u0160"+ + "\u0001\u0000\u0000\u0000\u0160-\u0001\u0000\u0000\u0000\u0161\u015f\u0001"+ + "\u0000\u0000\u0000\u0162\u0163\u0005F\u0000\u0000\u0163\u0164\u0003,\u0016"+ + "\u0000\u0164\u0165\u0005G\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166"+ + "\u0167\u0005\u0013\u0000\u0000\u0167\u016c\u0003$\u0012\u0000\u0168\u0169"+ + "\u0005\'\u0000\u0000\u0169\u016b\u0003$\u0012\u0000\u016a\u0168\u0001"+ + "\u0000\u0000\u0000\u016b\u016e\u0001\u0000\u0000\u0000\u016c\u016a\u0001"+ + "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d\u0170\u0001"+ + "\u0000\u0000\u0000\u016e\u016c\u0001\u0000\u0000\u0000\u016f\u0171\u0003"+ + "6\u001b\u0000\u0170\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000"+ + "\u0000\u0000\u0171\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005!\u0000"+ + "\u0000\u0173\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u01751\u0001\u0000\u0000\u0000"+ + "\u0176\u0177\u0005\u0004\u0000\u0000\u0177\u0178\u0003\u001e\u000f\u0000"+ + "\u01783\u0001\u0000\u0000\u0000\u0179\u017b\u0005\u000f\u0000\u0000\u017a"+ + "\u017c\u00036\u001b\u0000\u017b\u017a\u0001\u0000\u0000\u0000\u017b\u017c"+ + "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017e"+ + "\u0005!\u0000\u0000\u017e\u0180\u0003\u001e\u000f\u0000\u017f\u017d\u0001"+ + "\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01805\u0001\u0000"+ + "\u0000\u0000\u0181\u0186\u00038\u001c\u0000\u0182\u0183\u0005\'\u0000"+ + "\u0000\u0183\u0185\u00038\u001c\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ + "\u0186\u0187\u0001\u0000\u0000\u0000\u01877\u0001\u0000\u0000\u0000\u0188"+ + "\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0003 \u0010\u0000\u018a\u018b"+ + "\u0005\u0010\u0000\u0000\u018b\u018d\u0003\n\u0005\u0000\u018c\u018a\u0001"+ + "\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d9\u0001\u0000"+ + "\u0000\u0000\u018e\u0193\u0003H$\u0000\u018f\u0190\u0005)\u0000\u0000"+ + "\u0190\u0192\u0003H$\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0195"+ + "\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0194"+ + "\u0001\u0000\u0000\u0000\u0194;\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ + "\u0000\u0000\u0000\u0196\u019b\u0003B!\u0000\u0197\u0198\u0005)\u0000"+ + "\u0000\u0198\u019a\u0003B!\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a"+ + "\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b"+ + "\u019c\u0001\u0000\u0000\u0000\u019c=\u0001\u0000\u0000\u0000\u019d\u019b"+ + 
"\u0001\u0000\u0000\u0000\u019e\u01a3\u0003<\u001e\u0000\u019f\u01a0\u0005"+ + "\'\u0000\u0000\u01a0\u01a2\u0003<\u001e\u0000\u01a1\u019f\u0001\u0000"+ + "\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4?\u0001\u0000\u0000"+ + "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0007\u0003\u0000"+ + "\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8\u01ac\u0005U\u0000\u0000\u01a9"+ + "\u01aa\u0004!\n\u0000\u01aa\u01ac\u0003F#\u0000\u01ab\u01a8\u0001\u0000"+ + "\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01acC\u0001\u0000\u0000"+ + "\u0000\u01ad\u01d8\u00052\u0000\u0000\u01ae\u01af\u0003h4\u0000\u01af"+ + "\u01b0\u0005H\u0000\u0000\u01b0\u01d8\u0001\u0000\u0000\u0000\u01b1\u01d8"+ + "\u0003f3\u0000\u01b2\u01d8\u0003h4\u0000\u01b3\u01d8\u0003b1\u0000\u01b4"+ + "\u01d8\u0003F#\u0000\u01b5\u01d8\u0003j5\u0000\u01b6\u01b7\u0005F\u0000"+ + "\u0000\u01b7\u01bc\u0003d2\u0000\u01b8\u01b9\u0005\'\u0000\u0000\u01b9"+ + "\u01bb\u0003d2\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001"+ + "\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001"+ + "\u0000\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0005G\u0000\u0000\u01c0\u01d8\u0001\u0000"+ + "\u0000\u0000\u01c1\u01c2\u0005F\u0000\u0000\u01c2\u01c7\u0003b1\u0000"+ + "\u01c3\u01c4\u0005\'\u0000\u0000\u01c4\u01c6\u0003b1\u0000\u01c5\u01c3"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c9\u0001\u0000\u0000\u0000\u01c7\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000\u0000\u01c8\u01ca"+ + "\u0001\u0000\u0000\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cb"+ + "\u0005G\u0000\u0000\u01cb\u01d8\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005"+ + "F\u0000\u0000\u01cd\u01d2\u0003j5\u0000\u01ce\u01cf\u0005\'\u0000\u0000"+ + "\u01cf\u01d1\u0003j5\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d4"+ + "\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d2"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005G\u0000\u0000\u01d6\u01d8\u0001"+ + "\u0000\u0000\u0000\u01d7\u01ad\u0001\u0000\u0000\u0000\u01d7\u01ae\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b1\u0001\u0000\u0000\u0000\u01d7\u01b2\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b3\u0001\u0000\u0000\u0000\u01d7\u01b4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b5\u0001\u0000\u0000\u0000\u01d7\u01b6\u0001"+ + "\u0000\u0000\u0000\u01d7\u01c1\u0001\u0000\u0000\u0000\u01d7\u01cc\u0001"+ + "\u0000\u0000\u0000\u01d8E\u0001\u0000\u0000\u0000\u01d9\u01dc\u00055\u0000"+ + "\u0000\u01da\u01dc\u0005E\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000"+ + "\u01db\u01da\u0001\u0000\u0000\u0000\u01dcG\u0001\u0000\u0000\u0000\u01dd"+ + "\u01e1\u0003@ \u0000\u01de\u01df\u0004$\u000b\u0000\u01df\u01e1\u0003"+ + "F#\u0000\u01e0\u01dd\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000\u0000"+ + "\u0000\u01e1I\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\t\u0000\u0000"+ + "\u01e3\u01e4\u0005\u001f\u0000\u0000\u01e4K\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0005\u000e\u0000\u0000\u01e6\u01eb\u0003N\'\u0000\u01e7\u01e8"+ + "\u0005\'\u0000\u0000\u01e8\u01ea\u0003N\'\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecM\u0001\u0000\u0000"+ + "\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ee\u01f0\u0003\n\u0005\u0000"+ + 
"\u01ef\u01f1\u0007\u0004\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f4\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f3\u00053\u0000\u0000\u01f3\u01f5\u0007\u0005\u0000\u0000\u01f4"+ + "\u01f2\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "O\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\b\u0000\u0000\u01f7\u01f8"+ + "\u0003>\u001f\u0000\u01f8Q\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005\u0002"+ + "\u0000\u0000\u01fa\u01fb\u0003>\u001f\u0000\u01fbS\u0001\u0000\u0000\u0000"+ + "\u01fc\u01fd\u0005\u000b\u0000\u0000\u01fd\u0202\u0003V+\u0000\u01fe\u01ff"+ + "\u0005\'\u0000\u0000\u01ff\u0201\u0003V+\u0000\u0200\u01fe\u0001\u0000"+ + "\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203U\u0001\u0000\u0000"+ + "\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e\u0000"+ + "\u0206\u0207\u0005Y\u0000\u0000\u0207\u0208\u0003<\u001e\u0000\u0208W"+ + "\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0001\u0000\u0000\u020a\u020b"+ + "\u0003\u0014\n\u0000\u020b\u020d\u0003j5\u0000\u020c\u020e\u0003^/\u0000"+ + "\u020d\u020c\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000"+ + "\u020eY\u0001\u0000\u0000\u0000\u020f\u0210\u0005\u0007\u0000\u0000\u0210"+ + "\u0211\u0003\u0014\n\u0000\u0211\u0212\u0003j5\u0000\u0212[\u0001\u0000"+ + "\u0000\u0000\u0213\u0214\u0005\n\u0000\u0000\u0214\u0215\u0003:\u001d"+ + "\u0000\u0215]\u0001\u0000\u0000\u0000\u0216\u021b\u0003`0\u0000\u0217"+ + "\u0218\u0005\'\u0000\u0000\u0218\u021a\u0003`0\u0000\u0219\u0217\u0001"+ + "\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b\u0219\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c_\u0001\u0000"+ + "\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e\u021f\u0003@ \u0000"+ + "\u021f\u0220\u0005$\u0000\u0000\u0220\u0221\u0003D\"\u0000\u0221a\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0007\u0006\u0000\u0000\u0223c\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0003f3\u0000\u0225\u0227\u0003h4\u0000\u0226"+ + "\u0224\u0001\u0000\u0000\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0227"+ + "e\u0001\u0000\u0000\u0000\u0228\u022a\u0007\u0000\u0000\u0000\u0229\u0228"+ + "\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0001\u0000\u0000\u0000\u022b\u022c\u0005 \u0000\u0000\u022cg\u0001\u0000"+ + "\u0000\u0000\u022d\u022f\u0007\u0000\u0000\u0000\u022e\u022d\u0001\u0000"+ + "\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000"+ + "\u0000\u0000\u0230\u0231\u0005\u001f\u0000\u0000\u0231i\u0001\u0000\u0000"+ + "\u0000\u0232\u0233\u0005\u001e\u0000\u0000\u0233k\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0007\u0007\u0000\u0000\u0235m\u0001\u0000\u0000\u0000\u0236"+ + "\u0237\u0005\u0005\u0000\u0000\u0237\u0238\u0003p8\u0000\u0238o\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0005F\u0000\u0000\u023a\u023b\u0003\u0002"+ + "\u0001\u0000\u023b\u023c\u0005G\u0000\u0000\u023cq\u0001\u0000\u0000\u0000"+ + "\u023d\u023e\u0005\r\u0000\u0000\u023e\u023f\u0005i\u0000\u0000\u023f"+ + "s\u0001\u0000\u0000\u0000\u0240\u0241\u0005\u0003\u0000\u0000\u0241\u0244"+ + "\u0005_\u0000\u0000\u0242\u0243\u0005]\u0000\u0000\u0243\u0245\u0003<"+ + "\u001e\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000"+ + "\u0000\u0000\u0245\u024f\u0001\u0000\u0000\u0000\u0246\u0247\u0005^\u0000"+ + "\u0000\u0247\u024c\u0003v;\u0000\u0248\u0249\u0005\'\u0000\u0000\u0249"+ + 
"\u024b\u0003v;\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001"+ + "\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001"+ + "\u0000\u0000\u0000\u024d\u0250\u0001\u0000\u0000\u0000\u024e\u024c\u0001"+ + "\u0000\u0000\u0000\u024f\u0246\u0001\u0000\u0000\u0000\u024f\u0250\u0001"+ + "\u0000\u0000\u0000\u0250u\u0001\u0000\u0000\u0000\u0251\u0252\u0003<\u001e"+ + "\u0000\u0252\u0253\u0005$\u0000\u0000\u0253\u0255\u0001\u0000\u0000\u0000"+ + "\u0254\u0251\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000\u0000"+ + "\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0003<\u001e\u0000\u0257"+ + "w\u0001\u0000\u0000\u0000\u0258\u0259\u0005\u0012\u0000\u0000\u0259\u025a"+ + "\u0003$\u0012\u0000\u025a\u025b\u0005]\u0000\u0000\u025b\u025c\u0003>"+ + "\u001f\u0000\u025cy\u0001\u0000\u0000\u0000\u025d\u025e\u0005\u0011\u0000"+ + "\u0000\u025e\u0261\u00036\u001b\u0000\u025f\u0260\u0005!\u0000\u0000\u0260"+ + "\u0262\u0003\u001e\u000f\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261"+ + "\u0262\u0001\u0000\u0000\u0000\u0262{\u0001\u0000\u0000\u0000\u0263\u0265"+ + "\u0007\b\u0000\u0000\u0264\u0263\u0001\u0000\u0000\u0000\u0264\u0265\u0001"+ + "\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0005"+ + "\u0014\u0000\u0000\u0267\u0268\u0003~?\u0000\u0268\u0269\u0003\u0080@"+ + "\u0000\u0269}\u0001\u0000\u0000\u0000\u026a\u026d\u0003@ \u0000\u026b"+ + "\u026c\u0005Y\u0000\u0000\u026c\u026e\u0003@ \u0000\u026d\u026b\u0001"+ + "\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u007f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0005]\u0000\u0000\u0270\u0275\u0003\u0082"+ + "A\u0000\u0271\u0272\u0005\'\u0000\u0000\u0272\u0274\u0003\u0082A\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000"+ + "\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000"+ + "\u0276\u0081\u0001\u0000\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000"+ + "\u0278\u0279\u0003\u0010\b\u0000\u0279\u0083\u0001\u0000\u0000\u0000="+ + "\u008f\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9"+ + "\u00f4\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148"+ + "\u014d\u0157\u015f\u016c\u0170\u0174\u017b\u017f\u0186\u018c\u0193\u019b"+ + "\u01a3\u01ab\u01bc\u01c7\u01d2\u01d7\u01db\u01e0\u01eb\u01f0\u01f4\u0202"+ + "\u020d\u021b\u0226\u0229\u022e\u0244\u024c\u024f\u0254\u0261\u0264\u026d"+ + "\u0275"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 556a97657635a..6071219839bab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -1052,6 +1052,54 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p> */ @Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { } /** * {@inheritDoc}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 56b6999615f50..afe7146923791 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -622,4 +622,32 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im * {@link #visitChildren} on {@code ctx}.</p>
*/ @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p> + */ + @Override public T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p> + */ + @Override public T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p> + */ + @Override public T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { return visitChildren(ctx); } }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index cf658c4a73141..0faca2541c9ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -941,4 +941,44 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 86c1d1aafc33a..e91cd6670e971 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -567,4 +567,28 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinPredicate}.
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index f83af534eaa72..99e03b3653f79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.joni.exception.SyntaxException; @@ -68,6 +69,7 @@ import java.util.Set; import java.util.function.Function; +import static java.util.Collections.emptyList; import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; @@ -502,7 +504,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) @Override public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { if (false == Build.current().isSnapshot()) { - throw new ParsingException(source(ctx), "LOOKUP is in preview and only available in SNAPSHOT build"); + throw new ParsingException(source(ctx), "LOOKUP_🐔 is in preview and only available in SNAPSHOT build"); } var source = source(ctx); @@ -524,4 +526,42 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { return p -> new Lookup(source, p, tableName, matchFields, null /* localRelation will be resolved later*/); } + public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { + var source = source(ctx); + if (false == Build.current().isSnapshot()) { + throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build"); + } + + if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) { + String joinType = ctx.type == null ? "(INNER)" : ctx.type.getText(); + throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType); + } + + var target = ctx.joinTarget(); + UnresolvedRelation right = new UnresolvedRelation( + source(target), + new TableIdentifier(source(target.index), null, visitIdentifier(target.index)), + false, + emptyList(), + IndexMode.LOOKUP, + null, + "???" + ); + + var condition = ctx.joinCondition(); + + // ON only with qualified names + var predicates = expressions(condition.joinPredicate()); + List<Attribute> joinFields = new ArrayList<>(predicates.size()); + for (var f : predicates) { + // verify each field is an unresolved attribute + if (f instanceof UnresolvedAttribute ua) { + joinFields.add(ua); + } else { + throw new ParsingException(f.source(), "JOIN ON clause only supports fields at the moment, found [{}]", f.sourceText()); + } + } + + return p -> new LookupJoin(source, p, right, joinFields); + } }
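For context, a minimal sketch of how the new joinCommand rule surfaces end to end. It assumes a snapshot build (a release build trips the "JOIN is in preview" check above); EsqlParser#createStatement is taken here as the parser entry point, and the index and field names are invented:

    // Parsed by visitJoinCommand above into a LookupJoin logical plan.
    LogicalPlan plan = new EsqlParser().createStatement("FROM idx | LOOKUP JOIN languages_lookup ON language_code");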
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 0705ae7f778cd..484a655fc2988 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -18,7 +18,7 @@ public class ParsingException extends EsqlClientException { public ParsingException(String message, Exception cause, int line, int charPositionInLine) { super(message, cause); this.line = line; - this.charPositionInLine = charPositionInLine; + this.charPositionInLine = charPositionInLine + 1; } ParsingException(String message, Object... args) { @@ -42,7 +42,7 @@ public int getLineNumber() { } public int getColumnNumber() { - return charPositionInLine + 1; + return charPositionInLine; } public String getErrorMessage() {
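The net effect of the ParsingException change: the 0-based column that ANTLR reports is normalized to 1-based once, at construction time, instead of on every getColumnNumber() call. A quick sketch using the public constructor shown above:

    // ANTLR reports 0-based columns; the value is now stored 1-based.
    ParsingException e = new ParsingException("mock error", null, 1, 0);
    assert e.getLineNumber() == 1 && e.getColumnNumber() == 1;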
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java new file mode 100644 index 0000000000000..b3c273cbfa1bb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.Dissect; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.InlineStats; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.DissectExec; +import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; +import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; +import org.elasticsearch.xpack.esql.plan.physical.FilterExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; +import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; +import org.elasticsearch.xpack.esql.plan.physical.OrderExec; +import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; +import org.elasticsearch.xpack.esql.plan.physical.ShowExec; +import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; + +import java.util.ArrayList; +import java.util.List; + +public class PlanWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + entries.addAll(logical()); + entries.addAll(physical()); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> logical() { + return List.of( + Aggregate.ENTRY, + Dissect.ENTRY, + Enrich.ENTRY, + EsRelation.ENTRY, + EsqlProject.ENTRY, + Eval.ENTRY, + Filter.ENTRY, + Grok.ENTRY, + InlineStats.ENTRY, + InlineJoin.ENTRY, + Join.ENTRY, + LocalRelation.ENTRY, + Limit.ENTRY, + Lookup.ENTRY, + MvExpand.ENTRY, + OrderBy.ENTRY, + Project.ENTRY, + TopN.ENTRY + ); + } + + public static List<NamedWriteableRegistry.Entry> physical() { + return List.of( + AggregateExec.ENTRY, + DissectExec.ENTRY, + EnrichExec.ENTRY, + EsQueryExec.ENTRY, + EsSourceExec.ENTRY, + EvalExec.ENTRY, + ExchangeExec.ENTRY, + ExchangeSinkExec.ENTRY, + ExchangeSourceExec.ENTRY, + FieldExtractExec.ENTRY, + FilterExec.ENTRY, +
FragmentExec.ENTRY, + GrokExec.ENTRY, + HashJoinExec.ENTRY, + LimitExec.ENTRY, + LocalSourceExec.ENTRY, + MvExpandExec.ENTRY, + OrderExec.ENTRY, + ProjectExec.ENTRY, + ShowExec.ENTRY, + SubqueryExec.ENTRY, + TopNExec.ENTRY + ); + } +}
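A sketch of how these entries are typically consumed — feeding a NamedWriteableRegistry so logical and physical plans can be serialized between nodes. The wiring shown is illustrative; the actual registration happens in the plugin setup:

    NamedWriteableRegistry registry = new NamedWriteableRegistry(PlanWritables.getNamedWriteables());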
"[" + indexMode.name() + "]" : "") + + NodeUtils.limitedToString(attrs); } public static IndexMode readIndexMode(StreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 9e854450a2d34..4211f8a0d45b6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.ArrayList; @@ -118,7 +118,7 @@ private JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, namedGroupings, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, namedGroupings, leftFields, rightFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e07dd9e14649e..e845c25bd3b32 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -6,15 +6,10 @@ */ package org.elasticsearch.xpack.esql.plan.logical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; -import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; -import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.util.List; @@ -23,29 +18,6 @@ * For example, a logical plan in English would be: "I want to get from DEN to SFO". */ public abstract class LogicalPlan extends QueryPlan implements Resolvable { - public static List getNamedWriteables() { - return List.of( - Aggregate.ENTRY, - Dissect.ENTRY, - Enrich.ENTRY, - EsRelation.ENTRY, - EsqlProject.ENTRY, - Eval.ENTRY, - Filter.ENTRY, - Grok.ENTRY, - InlineStats.ENTRY, - InlineJoin.ENTRY, - Join.ENTRY, - LocalRelation.ENTRY, - Limit.ENTRY, - Lookup.ENTRY, - MvExpand.ENTRY, - OrderBy.ENTRY, - Project.ENTRY, - TopN.ENTRY - ); - } - /** * Order is important in the enum; any values should be added at the end. 
*/
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 70f8a24cfc87e..6e7f421003292 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.io.IOException; @@ -114,7 +114,7 @@ public JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, matchFields, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0dfbe4936e4e3..384c3f7a340ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -25,6 +25,10 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final TableIdentifier table; private final boolean frozen; private final List<Attribute> metadataFields; + /* + * Expected indexMode based on the declaration - used later for verification + * at resolution time.
+ */ private final IndexMode indexMode; private final String unresolvedMsg;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index f9be61ed2c8d7..0e182646d914a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -10,9 +10,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.AttributeSet; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -23,12 +22,12 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; -import java.util.Set; -import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; +import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; +import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.RIGHT; public class Join extends BinaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new); @@ -92,11 +91,6 @@ protected NodeInfo<Join> info() { ); } - @Override - public Join replaceChildren(LogicalPlan left, LogicalPlan right) { - return new Join(source(), left, right, config); - } - @Override public List<Attribute> output() { if (lazyOutput == null) { @@ -106,35 +100,42 @@ public List<Attribute> output() { } /** - * Merge output fields. - * Currently only implemented for LEFT JOINs; the rightOutput shadows the leftOutput, except for any attributes that - * occur in the join's matchFields. + * Combine the two lists of attributes into one. + * In case of (name) conflicts, specify which side wins, that is, which one overrides the other column - the left or the right. */ public static List<Attribute> computeOutput(List<Attribute> leftOutput, List<Attribute> rightOutput, JoinConfig config) { - AttributeSet matchFieldSet = new AttributeSet(config.matchFields()); - Set<String> matchFieldNames = new HashSet<>(Expressions.names(config.matchFields())); - return switch (config.type()) { - case LEFT -> { - // Right side becomes nullable.
- List<Attribute> fieldsAddedFromRight = removeCollisionsWithMatchFields(rightOutput, matchFieldSet, matchFieldNames); - yield mergeOutputAttributes(fieldsAddedFromRight, leftOutput); - } - default -> throw new UnsupportedOperationException("Other JOINs than LEFT not supported"); - }; + JoinType joinType = config.type(); + List<Attribute> output; + // TODO: make the other side nullable + if (LEFT.equals(joinType)) { + // right side becomes nullable and overrides left + // output = merge(leftOutput, makeNullable(rightOutput)); + output = merge(leftOutput, rightOutput); + } else if (RIGHT.equals(joinType)) { + // left side becomes nullable and overrides right + // output = merge(makeNullable(leftOutput), rightOutput); + output = merge(leftOutput, rightOutput); + } else { + throw new IllegalArgumentException(joinType.joinName() + " unsupported"); + } + return output; } - private static List<Attribute> removeCollisionsWithMatchFields( - List<Attribute> attributes, - AttributeSet matchFields, - Set<String> matchFieldNames - ) { - List<Attribute> result = new ArrayList<>(); - for (Attribute attr : attributes) { - if ((matchFields.contains(attr) || matchFieldNames.contains(attr.name())) == false) { - result.add(attr); - } + /** + * Merge the two lists of attributes into one, preserving order. + */ + private static List<Attribute> merge(List<Attribute> left, List<Attribute> right) { + // use linked hash map to preserve order + Map<String, Attribute> nameToAttribute = Maps.newLinkedHashMapWithExpectedSize(left.size() + right.size()); + for (Attribute a : left) { + nameToAttribute.put(a.name(), a); + } + for (Attribute a : right) { + // override the existing entry in place + nameToAttribute.compute(a.name(), (name, existing) -> a); } - return result; + + return new ArrayList<>(nameToAttribute.values()); } /** @@ -160,7 +161,7 @@ public static List<Attribute> makeReference(List<Attribute> output) { return out; } - public static List<Attribute> makeNullable(List<Attribute> output) { + private static List<Attribute> makeNullable(List<Attribute> output) { List<Attribute> out = new ArrayList<>(output.size()); for (Attribute a : output) { out.add(a.withNullability(Nullability.TRUE)); @@ -181,6 +182,15 @@ public boolean resolved() { return childrenResolved() && expressionsResolved(); } + public Join withConfig(JoinConfig config) { + return new Join(source(), left(), right(), config); + } + + @Override + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new Join(source(), left, right, config); + } + @Override public String commandName() { return "JOIN";
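A standalone illustration of the merge semantics above, with plain strings standing in for Attributes (names invented): insertion order is preserved, and a right-hand entry replaces a left-hand entry in place rather than moving to the end.

    Map<String, String> merged = new LinkedHashMap<>();
    for (String name : List.of("emp_no", "language")) merged.put(name, "left:" + name);
    for (String name : List.of("language", "salary")) merged.put(name, "right:" + name);
    // merged.values() -> [left:emp_no, right:language, right:salary]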
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java index 68ad50f2f67a0..383606d6ccbed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java @@ -22,12 +22,16 @@ * @param leftFields matched with the right fields * @param rightFields matched with the left fields */ +// TODO: this class needs refactoring into a more general form (expressions) since it currently contains +// both the condition (equi-join) between the left and right field as well as the output of the join keys, +// which makes sense only for the USING clause - which is better resolved in the analyzer (based on the names), +// hence why for now the attributes are set inside the analyzer public record JoinConfig(JoinType type, List<Attribute> matchFields, List<Attribute> leftFields, List<Attribute> rightFields) implements Writeable { public JoinConfig(StreamInput in) throws IOException { this( - JoinType.readFrom(in), + JoinTypes.readFrom(in), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class) @@ -43,6 +47,9 @@ public void writeTo(StreamOutput out) throws IOException { } public boolean expressionsResolved() { - return Resolvables.resolved(matchFields) && Resolvables.resolved(leftFields) && Resolvables.resolved(rightFields); + return type.resolved() + && Resolvables.resolved(matchFields) + && Resolvables.resolved(leftFields) + && Resolvables.resolved(rightFields); } }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java index c3095efc9e623..a309387b1f0a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java @@ -7,46 +7,15 @@ package org.elasticsearch.xpack.esql.plan.logical.join; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import java.io.IOException; +public interface JoinType extends Writeable { -public enum JoinType implements Writeable { - INNER(0, "INNER"), - LEFT(1, "LEFT OUTER"), - RIGHT(2, "RIGHT OUTER"), - FULL(3, "FULL OUTER"), - CROSS(4, "CROSS"); - - private final byte id; - private final String name; - - JoinType(int id, String name) { - this.id = (byte) id; - this.name = name; - } - - @Override - public String toString() { - return name; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeByte(id); + default String joinName() { + return getClass().getSimpleName(); } - public static JoinType readFrom(StreamInput in) throws IOException { - byte id = in.readByte(); - return switch (id) { - case 0 -> INNER; - case 1 -> LEFT; - case 2 -> RIGHT; - case 3 -> FULL; - case 4 -> CROSS; - default -> throw new IllegalArgumentException("unsupported join [" + id + "]"); - }; + default boolean resolved() { + return true; } }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java new file mode 100644 index 0000000000000..9d3471bc356f7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Attribute; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Utility class defining the concrete types of joins supported by ESQL.
+ */ +public class JoinTypes { + + private JoinTypes() {} + + public static JoinType INNER = CoreJoinType.INNER; + public static JoinType LEFT = CoreJoinType.LEFT; + public static JoinType RIGHT = CoreJoinType.RIGHT; + public static JoinType FULL = CoreJoinType.FULL; + public static JoinType CROSS = CoreJoinType.CROSS; + + private static Map<Byte, JoinType> JOIN_TYPES; + + static { + CoreJoinType[] types = CoreJoinType.values(); + JOIN_TYPES = Maps.newMapWithExpectedSize(types.length); + for (CoreJoinType type : types) { + JOIN_TYPES.put(type.id, type); + } + } + + /** + * The predefined core join types. Implemented as an enum for easy comparison and serialization. + */ + private enum CoreJoinType implements JoinType { + INNER(1, "INNER"), + LEFT(2, "LEFT OUTER"), + RIGHT(3, "RIGHT OUTER"), + FULL(4, "FULL OUTER"), + CROSS(5, "CROSS"); + + private final String name; + private final byte id; + + CoreJoinType(int id, String name) { + this.id = (byte) id; + this.name = name; + } + + @Override + public String joinName() { + return name; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id); + } + } + + /** + * Join type for the USING clause - shorthand for defining an equi-join (equality join, meaning the condition checks if columns across + * each side of the join are equal). + * One important difference is that the USING clause returns the join column only once, at the beginning of the result set. + */ + public static class UsingJoinType implements JoinType { + private final List<Attribute> columns; + private final JoinType coreJoin; + + public UsingJoinType(JoinType coreJoin, List<Attribute> columns) { + this.columns = columns; + this.coreJoin = coreJoin; + } + + @Override + public String joinName() { + return coreJoin.joinName() + " USING " + columns.toString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new IllegalArgumentException("USING join type should not be serialized due to being rewritten"); + } + + public JoinType coreJoin() { + return coreJoin; + } + + public List<Attribute> columns() { + return columns; + } + + @Override + public boolean resolved() { + return Resolvables.resolved(columns); + } + + @Override + public int hashCode() { + return Objects.hash(columns, coreJoin); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UsingJoinType that = (UsingJoinType) o; + return Objects.equals(columns, that.columns) && coreJoin == that.coreJoin; + } + + @Override + public String toString() { + return joinName(); + } + } + + /** + * Private class that is not used yet; it is defined to showcase why the join type was defined as an interface instead of a simpler + * enum.
+ */ + private abstract static class NaturalJoinType implements JoinType { + + private final JoinType joinType; + + private NaturalJoinType(JoinType joinType) { + this.joinType = joinType; + } + + @Override + public String joinName() { + return "NATURAL " + joinType.joinName(); + } + } + + public static JoinType readFrom(StreamInput in) throws IOException { + byte id = in.readByte(); + JoinType type = JOIN_TYPES.get(id); + if (type == null) { + throw new IllegalArgumentException("unsupported join [" + id + "]"); + } + return type; + } +}
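A short sketch of how the USING shorthand composes; the LookupJoin convenience constructor in the next file does exactly this with the parsed ON columns (joinFields here stands in for a List<Attribute>):

    JoinType using = new JoinTypes.UsingJoinType(JoinTypes.LEFT, joinFields);
    // joinName() prepends the core name: "LEFT OUTER USING " followed by the column list.
    // writeTo() throws by design - LookupJoin.surrogate() rewrites the plan to a core LEFT join before serialization.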
+ */ + @Override + public LogicalPlan surrogate() { + JoinConfig cfg = config(); + JoinConfig newConfig = new JoinConfig(LEFT, cfg.matchFields(), cfg.leftFields(), cfg.rightFields()); + Join normalized = new Join(source(), left(), right(), newConfig); + // TODO: decide whether to introduce USING or just basic ON semantics - keep the ordering out for now + return new Project(source(), normalized, output); + } + + public List output() { + return output; + } + + @Override + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new LookupJoin(source(), left, right, config(), output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create( + this, + LookupJoin::new, + left(), + right(), + config().type(), + config().matchFields(), + config().leftFields(), + config().rightFields(), + output + ); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), output); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj) == false) { + return false; + } + + LookupJoin other = (LookupJoin) obj; + return Objects.equals(output, other.output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 4574c3720f8ee..5ae3702993fcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -91,7 +91,7 @@ public List rightFields() { public Set addedFields() { if (lazyAddedFields == null) { - lazyAddedFields = outputSet(); + lazyAddedFields = new AttributeSet(output()); lazyAddedFields.removeAll(left().output()); } return lazyAddedFields; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java new file mode 100644 index 0000000000000..e01451ceaecac --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; + +public class LookupJoinExec extends BinaryExec implements EstimatesRowSize { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + PhysicalPlan.class, + "LookupJoinExec", + LookupJoinExec::new + ); + + private final List matchFields; + private final List leftFields; + private final List rightFields; + private final List output; + private List lazyAddedFields; + + public LookupJoinExec( + Source source, + PhysicalPlan left, + PhysicalPlan lookup, + List matchFields, + List leftFields, + List rightFields, + List output + ) { + super(source, left, lookup); + this.matchFields = matchFields; + this.leftFields = leftFields; + this.rightFields = rightFields; + this.output = output; + } + + private LookupJoinExec(StreamInput in) throws IOException { + super(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readNamedWriteable(PhysicalPlan.class)); + this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeNamedWriteableCollection(matchFields); + out.writeNamedWriteableCollection(leftFields); + out.writeNamedWriteableCollection(rightFields); + out.writeNamedWriteableCollection(output); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public PhysicalPlan lookup() { + return right(); + } + + public List matchFields() { + return matchFields; + } + + public List leftFields() { + return leftFields; + } + + public List rightFields() { + return rightFields; + } + + public List addedFields() { + if (lazyAddedFields == null) { + AttributeSet set = outputSet(); + set.removeAll(left().output()); + for (Attribute m : matchFields) { + set.removeIf(a -> a.name().equals(m.name())); + } + lazyAddedFields = new ArrayList<>(set); + lazyAddedFields.sort(Comparator.comparing(Attribute::name)); + } + return lazyAddedFields; + } + + @Override + public PhysicalPlan estimateRowSize(State state) { + state.add(false, output); + return this; + } + + @Override + public List output() { + return output; + } + + @Override + public AttributeSet inputSet() { + // TODO: this is a hack since the right side is always materialized - instead this should + // return the _doc so the extraction can happen lazily + return left().outputSet(); + } + + @Override + protected AttributeSet computeReferences() { + // TODO: same as above - once lazy materialization of both sides lands, this needs updating + return 
Expressions.references(leftFields); + } + + @Override + public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { + return new LookupJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LookupJoinExec::new, left(), right(), matchFields, leftFields, rightFields, output); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + LookupJoinExec hash = (LookupJoinExec) o; + return matchFields.equals(hash.matchFields) + && leftFields.equals(hash.leftFields) + && rightFields.equals(hash.rightFields) + && output.equals(hash.output); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), matchFields, leftFields, rightFields, output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java index ecf78908d6d3e..d2935ccb75b66 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.plan.physical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; @@ -20,34 +19,6 @@ * PhysicalPlan = take Delta, DEN to SJC, then SJC to SFO */ public abstract class PhysicalPlan extends QueryPlan { - public static List getNamedWriteables() { - return List.of( - AggregateExec.ENTRY, - DissectExec.ENTRY, - EnrichExec.ENTRY, - EsQueryExec.ENTRY, - EsSourceExec.ENTRY, - EvalExec.ENTRY, - ExchangeExec.ENTRY, - ExchangeSinkExec.ENTRY, - ExchangeSourceExec.ENTRY, - FieldExtractExec.ENTRY, - FilterExec.ENTRY, - FragmentExec.ENTRY, - GrokExec.ENTRY, - HashJoinExec.ENTRY, - LimitExec.ENTRY, - LocalSourceExec.ENTRY, - MvExpandExec.ENTRY, - OrderExec.ENTRY, - ProjectExec.ENTRY, - RowExec.ENTRY, - ShowExec.ENTRY, - SubqueryExec.ENTRY, - TopNExec.ENTRY - ); - } - public PhysicalPlan(Source source, List children) { super(source, children); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java deleted file mode 100644 index 3a104d4bc292b..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class RowExec extends LeafExec { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(PhysicalPlan.class, "RowExec", RowExec::new); - - private final List fields; - - public RowExec(Source source, List fields) { - super(source); - this.fields = fields; - } - - private RowExec(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readCollectionAsList(Alias::new)); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeCollection(fields()); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - public List fields() { - return fields; - } - - @Override - public List output() { - return Expressions.asAttributes(fields); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, RowExec::new, fields); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RowExec constant = (RowExec) o; - return Objects.equals(fields, constant.fields); - } - - @Override - public int hashCode() { - return Objects.hash(fields); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 3e81c2a2c1101..605e0d7c3109c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; @@ -48,9 +49,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; - /** * Static class used to convert aggregate expressions to the named expressions that represent their intermediate state. *
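For example (an illustrative note): the StdDev aggregate added in this change maps to Int, Long and Double intermediate variants, just like Rate - see the typeAndNames hunk below.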

    @@ -78,6 +76,7 @@ final class AggregateMapper { Min.class, Percentile.class, SpatialCentroid.class, + StdDev.class, Sum.class, Values.class, Top.class, @@ -171,7 +170,7 @@ private static Stream, Tuple>> typeAndNames(Class types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); } else if (Top.class.isAssignableFrom(clazz)) { types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (Rate.class.isAssignableFrom(clazz)) { + } else if (Rate.class.isAssignableFrom(clazz) || StdDev.class.isAssignableFrom(clazz)) { types = List.of("Int", "Long", "Double"); } else if (FromPartial.class.isAssignableFrom(clazz) || ToPartial.class.isAssignableFrom(clazz)) { types = List.of(""); // no type diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 12dc77e6e7c59..6fac7bab2bd80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -86,7 +86,6 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.IsNotNulls(), new ExpressionTranslators.Nots(), new ExpressionTranslators.Likes(), - new ExpressionTranslators.StringQueries(), new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), @@ -536,7 +535,7 @@ protected Query asQuery(Match match, TranslatorHandler handler) { public static class QueryStringFunctionTranslator extends ExpressionTranslator { @Override protected Query asQuery(QueryString queryString, TranslatorHandler handler) { - return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), null); + return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), Map.of()); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 0d0b8dda5fc74..1ffc652e54337 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -31,7 +31,6 @@ import org.elasticsearch.compute.operator.Operator.OperatorFactory; import org.elasticsearch.compute.operator.OutputOperator.OutputOperatorFactory; import org.elasticsearch.compute.operator.RowInTableLookupOperator; -import org.elasticsearch.compute.operator.RowOperator.RowOperatorFactory; import org.elasticsearch.compute.operator.ShowOperator; import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SinkOperator.SinkOperatorFactory; @@ -47,6 +46,7 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.tasks.CancellableTask; @@ -63,6 +63,8 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import 
org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import org.elasticsearch.xpack.esql.expression.Order; @@ -81,11 +83,11 @@ import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -125,6 +127,7 @@ public class LocalExecutionPlanner { private final ExchangeSourceHandler exchangeSourceHandler; private final ExchangeSinkHandler exchangeSinkHandler; private final EnrichLookupService enrichLookupService; + private final LookupFromIndexService lookupFromIndexService; private final PhysicalOperationProviders physicalOperationProviders; public LocalExecutionPlanner( @@ -138,6 +141,7 @@ public LocalExecutionPlanner( ExchangeSourceHandler exchangeSourceHandler, ExchangeSinkHandler exchangeSinkHandler, EnrichLookupService enrichLookupService, + LookupFromIndexService lookupFromIndexService, PhysicalOperationProviders physicalOperationProviders ) { this.sessionId = sessionId; @@ -149,6 +153,7 @@ public LocalExecutionPlanner( this.exchangeSourceHandler = exchangeSourceHandler; this.exchangeSinkHandler = exchangeSinkHandler; this.enrichLookupService = enrichLookupService; + this.lookupFromIndexService = lookupFromIndexService; this.physicalOperationProviders = physicalOperationProviders; this.configuration = configuration; } @@ -213,8 +218,6 @@ else if (node instanceof EsQueryExec esQuery) { return planEsQueryNode(esQuery, context); } else if (node instanceof EsStatsQueryExec statsQuery) { return planEsStats(statsQuery, context); - } else if (node instanceof RowExec row) { - return planRow(row, context); } else if (node instanceof LocalSourceExec localSource) { return planLocal(localSource, context); } else if (node instanceof ShowExec show) { @@ -225,8 +228,10 @@ else if (node instanceof EsQueryExec esQuery) { // lookups and joins else if (node instanceof EnrichExec enrich) { return planEnrich(enrich, context); - } else if (node instanceof HashJoinExec lookup) { - return planHashJoin(lookup, context); + } else if (node instanceof HashJoinExec join) { + return planHashJoin(join, context); + } else if (node instanceof LookupJoinExec join) { + return planLookupJoin(join, context); } // output else if (node instanceof OutputExec outputExec) { @@ -353,11 +358,10 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case VERSION -> TopNEncoder.VERSION; case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION, OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE; - case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE -> + 
case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; - case SOURCE -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); }; } List orders = topNExec.order().stream().map(order -> { @@ -489,7 +493,8 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon enrichIndex, enrich.matchType(), enrich.policyMatchField(), - enrich.enrichFields() + enrich.enrichFields(), + enrich.source() ), layout ); @@ -558,15 +563,57 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC return source.with(new ProjectOperatorFactory(projection), layout); } - private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { - return EvalMapper.toEvaluator(exp, layout); + private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlannerContext context) { + PhysicalOperation source = plan(join.left(), context); + Layout.Builder layoutBuilder = source.layout.builder(); + for (Attribute f : join.addedFields()) { + layoutBuilder.append(f); + } + Layout layout = layoutBuilder.build(); + + // TODO: this works when the join happens on the coordinator + /* + * But when it happens on the data node we get a + * \_FieldExtractExec[language_code{f}#15, language_name{f}#16]<[]> + * \_EsQueryExec[languages_lookup], indexMode[lookup], query[][_doc{f}#18], limit[], sort[] estimatedRowSize[62] + * Which we'd prefer not to do - at least for now. We already know the fields we're loading + * and don't want any local planning. 
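+ * As an illustrative sketch (query and field names hypothetical): a single-key lookup such as + * FROM employees | LOOKUP JOIN languages_lookup ON language_code + * resolves to exactly one match field, whose channel and type feed the LookupFromIndexOperator factory below; + * any other arity fails the matchFields.size() check with "can't plan".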
+ */ + EsQueryExec localSourceExec = (EsQueryExec) join.lookup(); + if (localSourceExec.indexMode() != IndexMode.LOOKUP) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + List matchFields = new ArrayList<>(join.matchFields().size()); + for (Attribute m : join.matchFields()) { + Layout.ChannelAndType t = source.layout.get(m.id()); + if (t == null) { + throw new IllegalArgumentException("can't plan [" + join + "][" + m + "]"); + } + matchFields.add(t); + } + if (matchFields.size() != 1) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + + return source.with( + new LookupFromIndexOperator.Factory( + sessionId, + parentTask, + context.queryPragmas().enrichMaxWorkers(), + matchFields.getFirst().channel(), + lookupFromIndexService, + matchFields.getFirst().type(), + localSourceExec.index().name(), + join.matchFields().getFirst().name(), + join.addedFields().stream().map(f -> (NamedExpression) f).toList(), + join.source() + ), + layout + ); } - private PhysicalOperation planRow(RowExec row, LocalExecutionPlannerContext context) { - List obj = row.fields().stream().map(f -> f.child().fold()).toList(); - Layout.Builder layout = new Layout.Builder(); - layout.append(row.output()); - return PhysicalOperation.fromSource(new RowOperatorFactory(obj), layout.build()); + private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { + return EvalMapper.toEvaluator(exp, layout); } private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index ceffae704cff0..fc52f2d5a9d23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner.mapper; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -21,11 +22,12 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -98,26 +100,36 @@ private PhysicalPlan mapBinary(BinaryPlan binary) { // special handling for inlinejoin - join + subquery which has to be executed first (async) and replaced by its result if (binary instanceof Join join) { JoinConfig config = join.config(); - if (config.type() != JoinType.LEFT) { + if (config.type() != 
JoinTypes.LEFT) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } PhysicalPlan left = map(binary.left()); PhysicalPlan right = map(binary.right()); - if (right instanceof LocalSourceExec == false) { - throw new EsqlIllegalArgumentException("right side of a join must be a local source"); + // if the right is data we can use a hash join directly + if (right instanceof LocalSourceExec localData) { + return new HashJoinExec( + join.source(), + left, + localData, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); + } + if (right instanceof EsSourceExec source && source.indexMode() == IndexMode.LOOKUP) { + return new LookupJoinExec( + join.source(), + left, + right, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); } - - return new HashJoinExec( - join.source(), - left, - right, - config.matchFields(), - config.leftFields(), - config.rightFields(), - join.output() - ); } return MapperUtils.unsupported(binary); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index b717af650b7a6..23e6f4fb91d18 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner.mapper; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -23,13 +24,14 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -178,7 +180,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { private PhysicalPlan mapBinary(BinaryPlan bp) { if (bp instanceof Join join) { JoinConfig config = join.config(); - if (config.type() != JoinType.LEFT) { + if (config.type() != JoinTypes.LEFT) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } @@ -190,7 +192,7 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { } PhysicalPlan right = map(bp.right()); - // no fragment means lookup + // if the right is data we can use a hash join directly if (right instanceof LocalSourceExec localData) { return new HashJoinExec( join.source(), @@ -202,6 +204,19 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { join.output() ); } + if (right instanceof 
FragmentExec fragment + && fragment.fragment() instanceof EsRelation relation + && relation.indexMode() == IndexMode.LOOKUP) { + return new LookupJoinExec( + join.source(), + left, + right, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); + } } return MapperUtils.unsupported(bp); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index 213e33f3712b1..e881eabb38c43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; @@ -39,7 +38,6 @@ import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; @@ -52,10 +50,6 @@ class MapperUtils { private MapperUtils() {} static PhysicalPlan mapLeaf(LeafPlan p) { - if (p instanceof Row row) { - return new RowExec(row.source(), row.fields()); - } - if (p instanceof LocalRelation local) { return new LocalSourceExec(local.source(), local.output(), local.supplier()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 76de337ded5c6..eeed811674f60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -61,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; @@ -81,6 +82,7 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -98,13 +100,16 @@ public class ComputeService { private final DriverTaskRunner driverRunner; private final ExchangeService exchangeService; private final EnrichLookupService enrichLookupService; + private final LookupFromIndexService lookupFromIndexService; private final ClusterService clusterService; + private final AtomicLong childSessionIdGenerator = new AtomicLong(); public ComputeService( SearchService searchService, 
TransportService transportService, ExchangeService exchangeService, EnrichLookupService enrichLookupService, + LookupFromIndexService lookupFromIndexService, ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, @@ -125,6 +130,7 @@ public ComputeService( this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); this.exchangeService = exchangeService; this.enrichLookupService = enrichLookupService; + this.lookupFromIndexService = lookupFromIndexService; this.clusterService = clusterService; } @@ -163,7 +169,7 @@ public void execute( return; } var computeContext = new ComputeContext( - sessionId, + newChildSession(sessionId), RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, @@ -326,14 +332,15 @@ private void startComputeOnDataNodes( // the new remote exchange sink, and initialize the computation on the target node via data-node-request. for (DataNode node : dataNodeResult.dataNodes()) { var queryPragmas = configuration.pragmas(); + var childSessionId = newChildSession(sessionId); ExchangeService.openExchange( transportService, node.connection, - sessionId, + childSessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); + var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); ActionListener computeResponseListener = computeListener.acquireCompute(clusterAlias); var dataNodeListener = ActionListener.runBefore(computeResponseListener, () -> l.onResponse(null)); @@ -341,7 +348,7 @@ private void startComputeOnDataNodes( node.connection, DATA_ACTION_NAME, new DataNodeRequest( - sessionId, + childSessionId, configuration, clusterAlias, node.shardIds, @@ -374,17 +381,18 @@ private void startComputeOnRemoteClusters( var linkExchangeListeners = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); try (RefCountingListener refs = new RefCountingListener(linkExchangeListeners)) { for (RemoteCluster cluster : clusters) { + final var childSessionId = newChildSession(sessionId); ExchangeService.openExchange( transportService, cluster.connection, - sessionId, + childSessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(rootTask, sessionId, transportService, cluster.connection); + var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, cluster.originalIndices); - var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, sessionId, configuration, remotePlan); + var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); var clusterListener = ActionListener.runBefore( computeListener.acquireCompute(cluster.clusterAlias()), () -> l.onResponse(null) @@ -429,6 +437,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, context.exchangeSource(), context.exchangeSink(), enrichLookupService, + lookupFromIndexService, new EsPhysicalOperationProviders(contexts) ); @@ -907,4 +916,8 @@ public List 
searchExecutionContexts() { return searchContexts.stream().map(ctx -> ctx.getSearchExecutionContext()).toList(); } } + + private String newChildSession(String session) { + return session + "/" + childSessionIdGenerator.incrementAndGet(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 266f07d22eaf5..a347a6947bf67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -16,7 +15,6 @@ import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Collections; -import java.util.Map; import java.util.Set; /** @@ -48,34 +46,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); - /** - * When we added the warnings for multivalued fields emitting {@code null} - * when they touched multivalued fields. Added in #102417. - */ - private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn"); - - /** - * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177. - */ - private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points"); - /** * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. */ private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source"); - /** - * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}. - * Added in ESQL-1183. - */ - private static final NodeFeature CONVERT_WARN = new NodeFeature("esql.convert_warn"); - - /** - * When we flipped the return type of {@code POW} to always return a double. Changed - * in #102183. - */ - private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double"); - /** * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. */ @@ -152,12 +127,6 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); - /** - * Support for loading values over enrich. This is supported by all versions of ESQL but not - * the unit test CsvTests. 
- */ - public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); - /** * Support for timespan units abbreviations */ @@ -215,16 +184,4 @@ public Set getFeatures() { return features; } } - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), - Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(SPATIAL_POINTS, Version.V_8_12_0), - Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0), - Map.entry(ENRICH_LOAD, Version.V_8_12_0) - ); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index e9b9f571e880e..67948fe717f2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -21,8 +21,8 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; @@ -38,6 +38,7 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; import org.elasticsearch.compute.operator.topn.TopNOperatorStatus; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; @@ -45,6 +46,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.esql.EsqlInfoTransportAction; @@ -58,17 +60,10 @@ import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import 
org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.IndexResolver; @@ -116,7 +111,7 @@ public Collection createComponents(PluginServices services) { BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize); setupSharedSecrets(); return List.of( - new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry()), + new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry(), getLicenseState()), new ExchangeService(services.clusterService().getSettings(), services.threadPool(), ThreadPool.Names.SEARCH, blockFactory), blockFactory ); @@ -131,6 +126,11 @@ private void setupSharedSecrets() { } } + + // to be overridden by tests + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } + /** * The settings defined by the ESQL plugin. * @@ -192,18 +192,10 @@ public List getNamedWriteables() { entries.add(SingleValueQuery.ENTRY); entries.add(AsyncOperator.Status.ENTRY); entries.add(EnrichLookupOperator.Status.ENTRY); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project - entries.addAll(NamedExpression.getNamedWriteables()); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); // TODO combine with above once these are in the same project - entries.addAll(Expression.getNamedWriteables()); - entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); // TODO combine with above once these are in the same project - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); + + entries.addAll(BlockWritables.getNamedWriteables()); + entries.addAll(ExpressionWritables.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); return entries; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 04e5fdc4b3bd2..fdc6e06a11032 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -101,6 +101,7 @@ public TransportEsqlQueryAction( transportService, exchangeService, enrichLookupService, + lookupFromIndexService, clusterService, threadPool, bigArrays, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java index 985dcf118ac54..4067fc5a4e065 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.injection.guice.Inject; import
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,8 +33,6 @@ public class TransportEsqlStatsAction extends TransportNodesAction< EsqlStatsResponse.NodeStatsResponse, Void> { - static final NodeFeature ESQL_STATS_FEATURE = new NodeFeature("esql.stats_node"); - // the plan executor holds the metrics private final FeatureService featureService; private final PlanExecutor planExecutor; @@ -63,13 +60,7 @@ public TransportEsqlStatsAction( @Override protected DiscoveryNode[] resolveRequest(EsqlStatsRequest request, ClusterState clusterState) { - if (featureService.clusterHasFeature(clusterState, ESQL_STATS_FEATURE)) { - // use the whole cluster - return super.resolveRequest(request, clusterState); - } else { - // not all nodes in the cluster have upgraded to esql - just use this node for now - return new DiscoveryNode[] { clusterService.localNode() }; - } + return super.resolveRequest(request, clusterState); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index c576d15f92608..9630a520e8654 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -292,10 +292,10 @@ private void preAnalyze( var unresolvedPolicies = preAnalysis.enriches.stream() .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) .collect(Collectors.toSet()); + final List indices = preAnalysis.indices; + // TODO: make a separate call for lookup indices final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( - preAnalysis.indices.stream() - .flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))) - .toArray(String[]::new) + indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) ).keySet(); enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> { // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index 0be8cf820d345..f61be4b59830e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -98,9 +98,8 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp // TODO flattened is simpler - could we get away with that? 
String[] names = fieldsCaps.keySet().toArray(new String[0]); Arrays.sort(names); - Set forbiddenFields = new HashSet<>(); Map rootFields = new HashMap<>(); - name: for (String name : names) { + for (String name : names) { Map fields = rootFields; String fullName = name; boolean isAlias = false; @@ -111,9 +110,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp break; } String parent = name.substring(0, nextDot); - if (forbiddenFields.contains(parent)) { - continue name; - } EsField obj = fields.get(parent); if (obj == null) { obj = new EsField(parent, OBJECT, new HashMap<>(), false, true); @@ -125,16 +121,10 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp fields = obj.getProperties(); name = name.substring(nextDot + 1); } - - List caps = fieldsCaps.get(fullName); - if (allNested(caps)) { - forbiddenFields.add(name); - continue; - } // TODO we're careful to make isAlias match IndexResolver - but do we use it? EsField field = firstUnsupportedParent == null - ? createField(fieldCapsResponse, name, fullName, caps, isAlias) + ? createField(fieldCapsResponse, name, fullName, fieldsCaps.get(fullName), isAlias) : new UnsupportedEsField( fullName, firstUnsupportedParent.getOriginalType(), @@ -164,15 +154,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp return IndexResolution.valid(new EsIndex(indexPattern, rootFields, concreteIndices), concreteIndices.keySet(), unavailableRemotes); } - private boolean allNested(List caps) { - for (IndexFieldCapabilities cap : caps) { - if (false == cap.type().equalsIgnoreCase("nested")) { - return false; - } - } - return true; - } - private static Map> collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) { Set seenHashes = new HashSet<>(); Map> fieldsCaps = new HashMap<>(); @@ -278,6 +259,8 @@ private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set // lenient because we throw our own errors looking at the response e.g. if something was not resolved // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable req.indicesOptions(FIELD_CAPS_INDICES_OPTIONS); + // we ignore the nested data type fields starting with https://github.com/elastic/elasticsearch/pull/111495 + req.filters("-nested"); req.setMergeResults(false); return req; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 348ca4acd100e..012720db9efd9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.index.EsIndex; @@ -235,7 +236,10 @@ public final void test() throws Throwable { * are tested in integration tests.
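* (Illustrative note: the JOIN_LOOKUP capability handled below follows the same pattern - skipped in CsvTests and exercised by the * integration suite instead.)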
*/ assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); - assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); + assumeFalse( + "enrich can't load fields in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName()) + ); assumeFalse( "can't use match in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.capabilityName()) @@ -253,7 +257,10 @@ public final void test() throws Throwable { "can't use MATCH function in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_FUNCTION.capabilityName()) ); - + assumeFalse( + "lookup join disabled for csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP.capabilityName()) + ); if (Build.current().isSnapshot()) { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. Spelling mistake?", @@ -542,6 +549,7 @@ void executeSubPlan( exchangeSource, exchangeSink, Mockito.mock(EnrichLookupService.class), + Mockito.mock(LookupFromIndexService.class), physicalOperationProviders ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 0a9e8b1b90681..9c24ec96dddf8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -24,15 +24,11 @@ import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -115,17 +111,9 @@ public static NamedWriteableRegistry writableRegistry() { entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, RegexpQueryBuilder.NAME, RegexpQueryBuilder::new)); entries.add(new 
NamedWriteableRegistry.Entry(QueryBuilder.class, ExistsQueryBuilder.NAME, ExistsQueryBuilder::new)); entries.add(SingleValueQuery.ENTRY); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.addAll(NamedExpression.getNamedWriteables()); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); + entries.addAll(ExpressionWritables.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); + entries.addAll(BlockWritables.getNamedWriteables()); return new NamedWriteableRegistry(entries); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index 2f3aa09868637..134981d3c3b0c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverSleeps; @@ -39,7 +39,7 @@ protected EsqlQueryResponse.Profile mutateInstance(EsqlQueryResponse.Profile ins @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), BlockWritables.getNamedWriteables().stream()).toList() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4aaf4f6cccf0f..35364089127cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -99,7 +100,7 @@ public void blockFactoryEmpty() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), BlockWritables.getNamedWriteables().stream()).toList() ); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index c1b2adddfc838..2770ed1f336ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1909,11 +1909,11 @@ public void testLookup() { String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } LogicalPlan plan = analyze(query); @@ -1945,18 +1945,14 @@ public void testLookup() { .item(startsWith("job{f}")) .item(startsWith("job.raw{f}")) /* - * Int is a reference here because we renamed it in project. - * If we hadn't it'd be a field and that'd be fine. + * Int key is returned as a full field (despite the rename) */ - .item(containsString("int{r}")) + .item(containsString("int{f}")) .item(startsWith("last_name{f}")) .item(startsWith("long_noidx{f}")) .item(startsWith("salary{f}")) /* - * It's important that name is returned as a *reference* here - * instead of a field. If it were a field we'd use SearchStats - * on it and discover that it doesn't exist in the index. It doesn't! - * We don't expect it to. It exists only in the lookup table. + * As is the name column from the right side. */ .item(containsString("name{f}")) ); @@ -1965,11 +1961,11 @@ public void testLookup() { public void testLookupMissingField() { String query = """ FROM test - | LOOKUP int_number_names ON garbage + | LOOKUP_🐔 int_number_names ON garbage """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); @@ -1979,11 +1975,11 @@ public void testLookupMissingField() { public void testLookupMissingTable() { String query = """ FROM test - | LOOKUP garbage ON a + | LOOKUP_🐔 garbage ON a """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); @@ -1994,11 +1990,11 @@ public void testLookupMatchTypeWrong() { String query = """ FROM test | RENAME last_name AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); @@ -2321,8 +2317,6 @@ 
public void testInvalidNamedParamsForIdentifierPatterns() { } public void testFromEnrichAndMatchColonUsage() { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - LogicalPlan plan = analyze(""" from *:test | EVAL x = to_string(languages) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8b364a603405c..7b2f85b80b3b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -195,13 +195,13 @@ public void testUnsupportedAndMultiTypedFields() { if (EsqlCapabilities.Cap.LOOKUP_V4.isEnabled()) { // LOOKUP with unsupported type assertEquals( - "1:41: column type mismatch, table column was [integer] and original column was [unsupported]", - error("from test* | lookup int_number_names on int", analyzer) + "1:43: column type mismatch, table column was [integer] and original column was [unsupported]", + error("from test* | lookup_🐔 int_number_names on int", analyzer) ); // LOOKUP with multi-typed field assertEquals( - "1:44: column type mismatch, table column was [double] and original column was [unsupported]", - error("from test* | lookup double_number_names on double", analyzer) + "1:46: column type mismatch, table column was [double] and original column was [unsupported]", + error("from test* | lookup_🐔 double_number_names on double", analyzer) ); } @@ -404,6 +404,11 @@ public void testAggFilterOnBucketingOrAggFunctions() { query("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by bucket(salary, 10)"); // but fails if it's different + assertEquals( + "1:32: can only use grouping function [bucket(a, 3)] part of the BY clause", + error("row a = 1 | stats sum(a) where bucket(a, 3) > -1 by bucket(a,2)") + ); + assertEquals( "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause", error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by emp_no") @@ -771,40 +776,40 @@ public void testWrongInputParam() { public void testPeriodAndDurationInRowAssignment() { for (var unit : TIME_DURATIONS) { - assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::time_duration] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::time_duration] directly in a row assignment", error("row a = 1 " + unit + "::time_duration") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", error("row a = \"1 " + unit + "\"::time_duration") ); assertEquals( - "1:5: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", error("row a = to_timeduration(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", error("row a = to_timeduration(\"1 " + unit + "\")") ); } for (var unit : DATE_PERIODS) { - assertEquals("1:5: cannot 
use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::date_period] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::date_period] directly in a row assignment", error("row a = 1 " + unit + "::date_period") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", error("row a = \"1 " + unit + "\"::date_period") ); assertEquals( - "1:5: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", error("row a = to_dateperiod(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", error("row a = to_dateperiod(\"1 " + unit + "\")") ); } @@ -1159,8 +1164,6 @@ public void testMatchInsideEval() throws Exception { } public void testMatchFilter() throws Exception { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - assertEquals( "1:19: first argument of [salary:\"100\"] must be [string], found value [salary] type [integer]", error("from test | where salary:\"100\"") @@ -1190,7 +1193,6 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception { } public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") @@ -1271,7 +1273,6 @@ public void testMatchFunctionOnlyAllowedInWhere() throws Exception { } public void testMatchOperatornOnlyAllowedInWhere() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator"); } @@ -1317,8 +1318,6 @@ public void testMatchFunctionWithDisjunctions() { } public void testMatchOperatorWithDisjunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - checkWithDisjunctions(":", "first_name : \"Anna\"", "operator"); } @@ -1374,7 +1373,6 @@ public void testMatchFunctionWithNonBooleanFunctions() { } public void testMatchOperatorWithNonBooleanFunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator"); } @@ -1452,8 +1450,6 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { "1:68: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") ); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:62: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where first_name : \"Anna\"") @@ -1473,8 +1469,6 @@ public void testMatchFunctionNullArgs() throws 
Exception { public void testMatchTargetsExistingField() throws Exception { assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")")); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals("1:33: Unknown column [first_name]", error("from test | keep emp_no | where first_name : \"Anna\"")); } @@ -1738,6 +1732,8 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1765,6 +1761,8 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1778,6 +1776,8 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); assertEquals( @@ -1799,6 +1799,13 @@ public void testCategorizeWithinAggregations() { ); } + public void testSortByAggregate() { + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT count(*)")); + assertEquals("1:28: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT to_string(count(*))")); + assertEquals("1:22: Aggregate functions are not allowed in SORT [MAX]", error("ROW a = 1 | SORT 1 + max(a)")); + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("FROM test | SORT count(*)")); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java index a2aa447c748e9..6dd0c5fe88afd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java @@ -8,20 +8,11 @@ package org.elasticsearch.xpack.esql.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; -import java.util.ArrayList; -import java.util.List; - public abstract class 
AbstractExpressionSerializationTests extends AbstractNodeSerializationTests { public static Expression randomChild() { return ReferenceAttributeTests.randomReferenceAttribute(false); @@ -29,17 +20,7 @@ public static Expression randomChild() { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(NamedExpression.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); - entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); - entries.add(org.elasticsearch.xpack.esql.expression.Order.ENTRY); - return new NamedWriteableRegistry(entries); + return new NamedWriteableRegistry(ExpressionWritables.getNamedWriteables()); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java index ccbed01994bf7..7bb8ab3e147e2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java @@ -11,23 +11,18 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.AbstractWireTestCase; import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.in; public class AliasTests extends AbstractWireTestCase { public static Alias randomAlias() { @@ -76,10 +71,6 @@ protected Alias copyInstance(Alias instance, TransportVersion version) throws IO @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(NamedExpression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.addAll(Expression.getNamedWriteables()); - return new NamedWriteableRegistry(entries); + return new NamedWriteableRegistry(ExpressionWritables.allExpressions()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java index a9750acdb1b84..d59e309790ad2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java
@@ -15,13 +15,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.ExpressionWritables;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 import org.elasticsearch.xpack.esql.session.Configuration;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Objects;

 import static org.hamcrest.Matchers.sameInstance;
@@ -52,9 +51,7 @@ protected final ExtraAttribute mutateInstance(ExtraAttribute instance) {

     @Override
     protected final NamedWriteableRegistry getNamedWriteableRegistry() {
-        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(Attribute.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY);
-        return new NamedWriteableRegistry(entries);
+        return new NamedWriteableRegistry(ExpressionWritables.attributes());
     }

     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 181b8d52bf888..7802d74d2264f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.xpack.esql.core.util.NumericUtils;
 import org.elasticsearch.xpack.esql.core.util.StringUtils;
 import org.elasticsearch.xpack.esql.evaluator.EvalMapper;
+import org.elasticsearch.xpack.esql.expression.function.fulltext.Match;
 import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
@@ -130,7 +131,9 @@ public abstract class AbstractFunctionTestCase extends ESTestCase {
         entry("mod", Mod.class),
         entry("neg", Neg.class),
         entry("is_null", IsNull.class),
-        entry("is_not_null", IsNotNull.class)
+        entry("is_not_null", IsNotNull.class),
+        // Match operator is both a function and an operator
+        entry("match_operator", Match.class)
     );

     private static EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry().snapshotRegistry();
@@ -813,6 +816,10 @@ private static String buildSignatureSvg(String name) throws IOException {
         if (unaryOperator != null) {
             return RailRoadDiagram.unaryOperator(unaryOperator);
         }
+        String searchOperator = searchOperator(name);
+        if (searchOperator != null) {
+            return RailRoadDiagram.searchOperator(searchOperator);
+        }
         FunctionDefinition definition = definition(name);
         if (definition != null) {
             return RailRoadDiagram.functionSignature(definition);
@@ -862,7 +869,7 @@ public static void renderDocs() throws IOException {
             return;
         }
         String name = functionName();
-        if (binaryOperator(name) != null || unaryOperator(name) != null || likeOrInOperator(name)) {
+        if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) {
             renderDocsForOperators(name);
             return;
         }
@@ -1258,6 +1265,16 @@ private static String binaryOperator(String name) {
         };
     }

+    /**
+     * If this test is for a
search operator return its symbol, otherwise return {@code null}. + */ + private static String searchOperator(String name) { + return switch (name) { + case "match_operator" -> ":"; + default -> null; + }; + } + /** * If this tests is for a unary operator return its symbol, otherwise return {@code null}. * This is functionally the reverse of {@link ExpressionBuilder#visitArithmeticUnary}. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java new file mode 100644 index 0000000000000..98f36d339976c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.TestUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; +import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.stats.Metrics; + +import java.util.List; + +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzerDefaultMapping; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultEnrichResolution; +import static org.hamcrest.Matchers.containsString; + +public class CheckLicenseTests extends ESTestCase { + + private final EsqlParser parser = new EsqlParser(); + private final String esql = "from tests | eval license() | LIMIT 10"; + + public void testLicense() { + for (License.OperationMode functionLicense : License.OperationMode.values()) { + final LicensedFeature functionLicenseFeature = random().nextBoolean() + ? 
LicensedFeature.momentary("test", "license", functionLicense)
+                : LicensedFeature.persistent("test", "license", functionLicense);
+            final EsqlFunctionRegistry.FunctionBuilder builder = (source, expression, cfg) -> {
+                final LicensedFunction licensedFunction = new LicensedFunction(source);
+                licensedFunction.setLicensedFeature(functionLicenseFeature);
+                return licensedFunction;
+            };
+            for (License.OperationMode operationMode : License.OperationMode.values()) {
+                if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, functionLicense) < 0) {
+                    // non-compliant license
+                    final VerificationException ex = expectThrows(VerificationException.class, () -> analyze(builder, operationMode));
+                    assertThat(ex.getMessage(), containsString("current license is non-compliant for function [license()]"));
+                } else {
+                    // compliant license
+                    assertNotNull(analyze(builder, operationMode));
+                }
+            }
+        }
+    }
+
+    private LogicalPlan analyze(EsqlFunctionRegistry.FunctionBuilder builder, License.OperationMode operationMode) {
+        final FunctionDefinition def = EsqlFunctionRegistry.def(LicensedFunction.class, builder, "license");
+        final EsqlFunctionRegistry registry = new EsqlFunctionRegistry(def) {
+            @Override
+            public EsqlFunctionRegistry snapshotRegistry() {
+                return this;
+            }
+        };
+        return analyzer(registry, operationMode).analyze(parser.createStatement(esql));
+    }
+
+    private static Analyzer analyzer(EsqlFunctionRegistry registry, License.OperationMode operationMode) {
+        return new Analyzer(
+            new AnalyzerContext(EsqlTestUtils.TEST_CFG, registry, analyzerDefaultMapping(), defaultEnrichResolution()),
+            new Verifier(new Metrics(new EsqlFunctionRegistry()), getLicenseState(operationMode))
+        );
+    }
+
+    private static XPackLicenseState getLicenseState(License.OperationMode operationMode) {
+        final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState();
+        licenseState.update(new XPackLicenseStatus(operationMode, true, null));
+        return licenseState;
+    }
+
+    // It needs to be public because we run validation on it via reflection in org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests.
+    // It also prevents adding the license as a constructor parameter.
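
The compliant/non-compliant branch above boils down to one rule: a TRIAL license passes everything, and any other operating mode must rank at least as high as the level the feature requires. A minimal standalone sketch of that rule, with a hypothetical Mode enum standing in for License.OperationMode (the real ordering lives in License.OperationMode.compare):

enum Mode { MISSING, BASIC, STANDARD, GOLD, PLATINUM, ENTERPRISE, TRIAL }

static boolean isCompliant(Mode operating, Mode required) {
    // TRIAL always passes; otherwise compare rank, mirroring the test's
    // "TRIAL != operationMode && compare(operationMode, functionLicense) < 0" check.
    return operating == Mode.TRIAL || operating.compareTo(required) >= 0;
}
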
+ public static class LicensedFunction extends Function { + + private LicensedFeature licensedFeature; + + public LicensedFunction(Source source) { + super(source, List.of()); + } + + void setLicensedFeature(LicensedFeature licensedFeature) { + this.licensedFeature = licensedFeature; + } + + @Override + public boolean checkLicense(XPackLicenseState state) { + if (licensedFeature instanceof LicensedFeature.Momentary momentary) { + return momentary.check(state); + } else { + return licensedFeature.checkWithoutTracking(state); + } + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + + @Override + public Expression replaceChildren(List newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + + @Override + public void writeTo(StreamOutput out) { + throw new UnsupportedOperationException(); + } + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index 7fe67707a7976..775ca45bfa124 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -262,6 +262,42 @@ public static List dateCases(int minRows, int maxRows) { return cases; } + /** + * + * Generate cases for {@link DataType#DATE_NANOS}. + * + */ + public static List dateNanosCases(int minRows, int maxRows) { + List cases = new ArrayList<>(); + addSuppliers(cases, minRows, maxRows, "<1970-01-01T00:00:00.000000000Z>", DataType.DATE_NANOS, () -> 0L); + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.DATE_NANOS, + () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11) + ); + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.DATE_NANOS, + () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE) + ); + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.DATE_NANOS, + () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE) + ); + + return cases; + } + public static List booleanCases(int minRows, int maxRows) { List cases = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java index df0737feadd8d..43e2ededeff0e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java @@ -89,6 +89,18 @@ static String binaryOperator(String operator) throws IOException { return toSvg(new Sequence(expressions.toArray(Expression[]::new))); } + /** + * Generate a railroad diagram for a search operator. The output would look like + * {@code field : value}. 
+ */ + static String searchOperator(String operator) throws IOException { + List expressions = new ArrayList<>(); + expressions.add(new Literal("field")); + expressions.add(new Syntax(operator)); + expressions.add(new Literal("query")); + return toSvg(new Sequence(expressions.toArray(Expression[]::new))); + } + /** * Generate a railroad diagram for unary operator. The output would look like * {@code -v}. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java index 5e23083d7c810..fff2d824fc710 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -52,6 +52,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.dateNanosCases(1, 1000), MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index 9756804a1ec0f..7d4b46f2a902a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -90,6 +90,15 @@ public static Iterable parameters() { equalTo(200L) ) ), + new TestCaseSupplier( + List.of(DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")), + "Max[field=Attribute[channel=0]]", + DataType.DATE_NANOS, + equalTo(200L) + ) + ), new TestCaseSupplier( List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index 171181496c889..58ef8d86017a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -90,6 +90,15 @@ public static Iterable parameters() { equalTo(200L) ) ), + new TestCaseSupplier( + List.of(DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")), + "Min[field=Attribute[channel=0]]", + DataType.DATE_NANOS, + equalTo(200L) + ) + ), new TestCaseSupplier( List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java new file mode 100644 index 0000000000000..85b96e29d1f6a --- 
/dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.aggregation.WelfordAlgorithm; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class StdDevTests extends AbstractAggregationTestCase { + public StdDevTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = new ArrayList(); + + Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) + ).flatMap(List::stream).map(StdDevTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); + + return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new StdDev(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var fieldValues = fieldTypedData.multiRowData(); + + WelfordAlgorithm welfordAlgorithm = new WelfordAlgorithm(); + + for (var fieldValue : fieldValues) { + var value = ((Number) fieldValue).doubleValue(); + welfordAlgorithm.add(value); + } + var result = welfordAlgorithm.evaluate(); + var expected = Double.isInfinite(result) ? 
null : result; + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "StdDev[field=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(expected) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java index 55320543d0ec3..29faceee7497e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -45,6 +45,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.dateNanosCases(1, 1000), MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java new file mode 100644 index 0000000000000..32e9670286ef7 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.fulltext;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.FunctionName;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.function.Supplier;
+
+/**
+ * This class is only used to generate docs for the match operator - all testing is done in {@link MatchTests}
+ */
+@FunctionName("match_operator")
+public class MatchOperatorTests extends MatchTests {
+
+    public MatchOperatorTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
+        super(testCaseSupplier);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+        // Have a minimal test so that we can generate the appropriate types in the docs
+        List<TestCaseSupplier> suppliers = new LinkedList<>();
+        addPositiveTestCase(List.of(DataType.KEYWORD, DataType.KEYWORD), suppliers);
+        addPositiveTestCase(List.of(DataType.TEXT, DataType.TEXT), suppliers);
+        addPositiveTestCase(List.of(DataType.KEYWORD, DataType.TEXT), suppliers);
+        addPositiveTestCase(List.of(DataType.TEXT, DataType.KEYWORD), suppliers);
+        return parameterSuppliersFromTypedData(suppliers);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
index 6d0c45a972299..6a4a7404135f9 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
@@ -36,19 +36,11 @@ public MatchTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCase

     @ParametersFactory
     public static Iterable<Object[]> parameters() {
-        Set<DataType> supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT);
-        Set<DataType> supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER);
-        Set<DataType> supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT);
-        List<Set<DataType>> supportedPerPosition = List.of(
-            supportedTextParams,
-            supportedTextParams,
-            supportedNumericParams,
-            supportedFuzzinessParams
-        );
+        List<Set<DataType>> supportedPerPosition = supportedParams();
         List<TestCaseSupplier> suppliers = new LinkedList<>();
         for (DataType fieldType : DataType.stringTypes()) {
             for (DataType queryType : DataType.stringTypes()) {
-                addPositiveTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers);
+                addPositiveTestCase(List.of(fieldType, queryType), suppliers);
                 addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers);
             }
         }
@@ -61,11 +53,20 @@ public static Iterable<Object[]> parameters() {
         );
     }

-    private static void addPositiveTestCase(
-        List<DataType> paramDataTypes,
-        List<Set<DataType>> supportedPerPosition,
-        List<TestCaseSupplier> suppliers
-    ) {
+    protected static List<Set<DataType>> supportedParams() {
+        Set<DataType> supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT);
+        Set<DataType> supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER);
+        Set<DataType> supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT);
+        List<Set<DataType>> supportedPerPosition = List.of(
+            supportedTextParams,
+            supportedTextParams,
+            supportedNumericParams,
+            supportedFuzzinessParams
+        );
+        return supportedPerPosition;
+    }
+
+    protected static void addPositiveTestCase(List<DataType> paramDataTypes, List<TestCaseSupplier> suppliers) {
        // 
Positive case - creates an ES field from the field parameter type suppliers.add( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 65f5653f27e1a..11894cf5b847b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -397,9 +396,6 @@ protected static void dateNanos( DataType expectedDataType, BiFunction> matcher ) { - if (EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG.isEnabled() == false) { - return; - } cases.add( new TestCaseSupplier( name + "(epoch nanos)", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index e8f9f26a76f43..0d114b4964920 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -113,8 +113,8 @@ public static Iterable parameters() { "rhs", (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(), DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), + TestCaseSupplier.dateNanosCases(), + TestCaseSupplier.dateNanosCases(), List.of(), false ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java deleted file mode 100644 index ff00a161e1bb1..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.predicate.operator.fulltext; - -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; - -import java.io.IOException; - -public class StringQuerySerializationTests extends AbstractFulltextSerializationTests { - - private static final String COMMA = ","; - - @Override - protected final StringQueryPredicate createTestInstance() { - return new StringQueryPredicate(randomSource(), randomAlphaOfLength(randomIntBetween(1, 16)), randomOptionOrNull()); - } - - @Override - protected StringQueryPredicate mutateInstance(StringQueryPredicate instance) throws IOException { - var query = instance.query(); - var options = instance.options(); - if (randomBoolean()) { - query = randomValueOtherThan(query, () -> randomAlphaOfLength(randomIntBetween(1, 16))); - } else { - options = randomValueOtherThan(options, this::randomOptionOrNull); - } - return new StringQueryPredicate(instance.source(), query, options); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index d3e1710a715af..9a1a30b892b22 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -23,10 +24,10 @@ import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.function.FieldAttributeTests; import org.elasticsearch.xpack.esql.expression.function.MetadataAttributeTests; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttributeTests; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.type.EsFieldTests; @@ -280,9 +281,8 @@ private Column randomColumn() { static { List writeables = new ArrayList<>(); - writeables.addAll(Block.getNamedWriteables()); - writeables.addAll(Attribute.getNamedWriteables()); - writeables.add(UnsupportedAttribute.ENTRY); + writeables.addAll(BlockWritables.getNamedWriteables()); + writeables.addAll(ExpressionWritables.attributes()); REGISTRY = new NamedWriteableRegistry(new ArrayList<>(new HashSet<>(writeables))); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 073a51ee69114..269b4806680a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -22,10 +22,10 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -145,7 +145,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } @@ -1092,8 +1092,6 @@ public void testMissingFieldsDoNotGetExtracted() { * estimatedRowSize[324] */ public void testSingleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - var plan = plannerOptimizer.plan(""" from test | where first_name:"Anna" @@ -1124,8 +1122,6 @@ public void testSingleMatchFilterPushdown() { * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336] */ public void testMultipleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - String query = """ from test | where first_name:"Anna" and first_name:"Anneke" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c29f111488f96..a11a9cef82989 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -111,7 +111,7 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -2141,7 +2141,7 @@ public void testLimitThenSortBeforeMvExpand() { mvExpand = as(topN.child(), MvExpand.class); var limit = as(mvExpand.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(7300)); - as(limit.child(), Row.class); + as(limit.child(), LocalRelation.class); } /** @@ -2286,7 +2286,7 @@ public void testSortMvExpand() { var expand = as(plan, MvExpand.class); assertThat(expand.limit(), equalTo(1000)); var topN = as(expand.child(), TopN.class); - var row = as(topN.child(), Row.class); + var row = as(topN.child(), LocalRelation.class); } /** @@ -2327,7 +2327,7 @@ public void 
testWhereMvExpand() { assertThat(expand.limit(), equalTo(1000)); var limit2 = as(expand.child(), Limit.class); assertThat(limit2.limit().fold(), is(1000)); - var row = as(limit2.child(), Row.class); + var row = as(limit2.child(), LocalRelation.class); } private static List orderNames(TopN topN) { @@ -2563,7 +2563,7 @@ public void testSimplifyRLikeMatchAll() { public void testRLikeWrongPattern() { String query = "from test | where first_name rlike \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[invalid range: from (95) cannot be > to (93)]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -2571,7 +2571,7 @@ public void testRLikeWrongPattern() { public void testLikeWrongPattern() { String query = "from test | where first_name like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -3545,7 +3545,7 @@ public void testMvExpandFoldable() { var filterProp = ((GreaterThan) filter.condition()).left(); assertTrue(expand.expanded().semanticEquals(filterProp)); assertFalse(expand.target().semanticEquals(filterProp)); - var row = as(expand.child(), Row.class); + var row = as(expand.child(), LocalRelation.class); } /** @@ -3564,7 +3564,7 @@ public void testRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** @@ -3583,7 +3583,7 @@ public void testMultipleRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a", "b")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** @@ -5624,6 +5624,7 @@ protected List filteredWarnings() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test @@ -5650,7 +5651,7 @@ public void testLookupSimple() { var limit = as(left.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(1000)); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); @@ -5703,6 +5704,7 @@ public void testLookupSimple() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupStats() { String query = """ FROM test @@ -5738,7 +5740,7 @@ public void testLookupStats() { assertThat(left.output().toString(), containsString("int{r}")); 
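
// Context for the JoinTypes.LEFT assertions below: a left join keeps every
// left-hand row, appends the lookup columns when the key matches, and leaves
// them null otherwise. A minimal sketch of one joined row (hypothetical
// helper, not the ES implementation):
//
//     static Map<String, Object> leftJoinRow(Map<String, Object> leftRow,
//             Map<Object, Map<String, Object>> lookupByKey, String key) {
//         Map<String, Object> out = new HashMap<>(leftRow);
//         Map<String, Object> match = lookupByKey.get(leftRow.get(key));
//         if (match != null) {
//             out.putAll(match); // enrich with the lookup table's columns
//         }
//         return out; // the left row survives whether or not there was a match
//     }
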
as(left.child(), EsRelation.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index eb115ed7b2948..f3ba11457a715 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -93,7 +93,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -114,7 +114,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -2751,7 +2750,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); var eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); // Now optimize the plan and assert the same plan again, since no FieldExtractExec is added var optimized = optimizedPlan(plan); @@ -2765,7 +2764,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); } /** @@ -6423,11 +6422,12 @@ public void testMaxQueryDepthPlusExpressionDepth() { assertThat(e.getMessage(), containsString("ESQL statement exceeded the maximum query depth allowed (" + MAX_QUERY_DEPTH + ")")); } + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int"""; + | LOOKUP_🐔 int_number_names ON int"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); @@ -6468,18 +6468,19 @@ public void testLookupSimple() { * \_EsQueryExec[...] 
* } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenProject() { String query = """ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } PhysicalPlan plan = optimizedPlan(physicalPlan(query)); @@ -6526,17 +6527,18 @@ public void testLookupThenProject() { * \_LocalRelation[[int{f}#24, name{f}#25],[...]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenTopN() { String query = """ FROM employees | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = physicalPlan(query); @@ -6553,7 +6555,7 @@ public void testLookupThenTopN() { matchesList().item(startsWith("name{f}")).item(startsWith("emp_no{f}")) ); Join join = as(innerTopN.child(), Join.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertMap(join.config().matchFields().stream().map(Objects::toString).toList(), matchesList().item(startsWith("int{r}"))); Project innerProject = as(join.left(), Project.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 0177747d27243..710637c05a900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -134,7 +134,7 @@ public void testStringLiteralsExceptions() { ); var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining()); - assertParsingException(() -> parse("row foo == " + number), "line 1:13: Number [" + number + "] is too large"); + assertParsingException(() -> parse("row foo == " + number), "line 1:12: Number [" + number + "] is too large"); } public void testBooleanLiteralsCondition() { @@ -442,20 +442,20 @@ public void testOverflowingValueForDuration() { for (String unit : List.of("milliseconds", "seconds", "minutes", "hours")) { assertParsingException( () -> parse("row x = 9223372036854775808 " + unit), // unsigned_long (Long.MAX_VALUE + 1) - "line 1:10: Number [9223372036854775808] outside of [" + unit + "] range" + "line 1:9: Number [9223372036854775808] outside of [" + unit + "] range" ); assertParsingException( () -> parse("row x = 18446744073709551616 " + unit), // double (UNSIGNED_LONG_MAX + 1) - "line 1:10: Number [18446744073709551616] outside of [" + unit + "] range" + "line 1:9: Number [18446744073709551616] outside of [" + unit + "] range" ); 
} assertParsingException( () -> parse("row x = 153722867280912931 minutes"), // Long.MAX_VALUE / 60 + 1 - "line 1:10: Number [153722867280912931] outside of [minutes] range" + "line 1:9: Number [153722867280912931] outside of [minutes] range" ); assertParsingException( () -> parse("row x = 2562047788015216 hours"), // Long.MAX_VALUE / 3600 + 1 - "line 1:10: Number [2562047788015216] outside of [hours] range" + "line 1:9: Number [2562047788015216] outside of [hours] range" ); } @@ -463,12 +463,12 @@ public void testOverflowingValueForPeriod() { for (String unit : List.of("days", "weeks", "months", "years")) { assertParsingException( () -> parse("row x = 2147483648 " + unit), // long (Integer.MAX_VALUE + 1) - "line 1:10: Number [2147483648] outside of [" + unit + "] range" + "line 1:9: Number [2147483648] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 306783379 weeks"), // Integer.MAX_VALUE / 7 + 1 - "line 1:10: Number [306783379] outside of [weeks] range" + "line 1:9: Number [306783379] outside of [weeks] range" ); } @@ -544,7 +544,7 @@ public void testWildcardProjectAwayPatterns() { } public void testForbidWildcardProjectAway() { - assertParsingException(() -> dropExpression("foo, *"), "line 1:21: Removing all fields is not allowed [*]"); + assertParsingException(() -> dropExpression("foo, *"), "line 1:20: Removing all fields is not allowed [*]"); } public void testForbidMultipleIncludeStar() { @@ -608,7 +608,7 @@ public void testMultipleProjectPatterns() { } public void testForbidWildcardProjectRename() { - assertParsingException(() -> renameExpression("b* AS a*"), "line 1:18: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); + assertParsingException(() -> renameExpression("b* AS a*"), "line 1:17: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); } public void testSimplifyInWithSingleElementList() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 0f46c1f44e8d3..69c00eb395fdb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -491,25 +491,25 @@ private void clusterAndIndexAsIndexPattern(String command, String clusterAndInde public void testStringAsLookupIndexPattern() { assumeTrue("requires snapshot build", Build.current().isSnapshot()); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"foo\" ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"foo\" ON j"); assertStringAsLookupIndexPattern("test-*", """ - ROW x = 1 | LOOKUP "test-*" ON j + ROW x = 1 | LOOKUP_🐔 "test-*" ON j """); - assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP test-* ON j"); - assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP 123-test@foo_bar+baz1 ON j"); + assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP_🐔 test-* ON j"); + assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP_🐔 123-test@foo_bar+baz1 ON j"); assertStringAsLookupIndexPattern("foo, test-*, abc, xyz", """ - ROW x = 1 | LOOKUP "foo, test-*, abc, xyz" ON j + ROW x = 1 | LOOKUP_🐔 "foo, test-*, abc, xyz" ON j """); - assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP ON j"); + assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP_🐔 ON j"); 
assertStringAsLookupIndexPattern( "", - "ROW x = 1 | LOOKUP \"\" ON j" + "ROW x = 1 | LOOKUP_🐔 \"\" ON j" ); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"\"\"foo\"\"\" ON j"); - assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP `backtick` ON j"); - assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP ``multiple`back``ticks``` ON j"); - assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP .dot ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\" ON j"); + assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP_🐔 `backtick` ON j"); + assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP_🐔 ``multiple`back``ticks``` ON j"); + assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP_🐔 .dot ON j"); clusterAndIndexAsLookupIndexPattern("cluster:index"); clusterAndIndexAsLookupIndexPattern("cluster:.index"); clusterAndIndexAsLookupIndexPattern("cluster*:index*"); @@ -519,16 +519,16 @@ public void testStringAsLookupIndexPattern() { } private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP " + clusterAndIndex + " ON j"); - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP \"" + clusterAndIndex + "\"" + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 " + clusterAndIndex + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 \"" + clusterAndIndex + "\"" + " ON j"); } public void testInvalidCharacterInIndexPattern() { Map<String, String> commands = new HashMap<>(); - commands.put("FROM {}", "line 1:7: "); + commands.put("FROM {}", "line 1:6: "); if (Build.current().isSnapshot()) { - commands.put("METRICS {}", "line 1:10: "); - commands.put("ROW x = 1 | LOOKUP {} ON j", "line 1:21: "); + commands.put("METRICS {}", "line 1:9: "); + commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:22: "); } String lineNumber; for (String command : commands.keySet()) { @@ -568,11 +568,11 @@ public void testInvalidCharacterInIndexPattern() { // comma separated indices, with exclusions // Invalid index names after removing exclusion fail, when there is no index name with wildcard before it for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:21: " : "line 1:24: "; + lineNumber = command.contains("FROM") ? "line 1:20: " : "line 1:23: "; expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); @@ -582,10 +582,10 @@ public void testInvalidCharacterInIndexPattern() { // Invalid index names, except invalid DateMath, are ignored if there is an index name with wildcard before it String dateMathError = "unit [D] not supported for date math [/D]"; for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; + lineNumber = command.contains("FROM") ? 
"line 1:9: " : "line 1:12: "; clustersAndIndices(command, "*", "-index#pattern"); clustersAndIndices(command, "index*", "-index#pattern"); clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>"); @@ -646,17 +646,17 @@ public void testInvalidQuotingAsMetricsIndexPattern() { public void testInvalidQuotingAsLookupIndexPattern() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); - expectError("ROW x = 1 | LOOKUP \"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP foo\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", "line 1:31: mismatched input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", ": mismatched input '\"bar\"' expecting 'on'"); } public void testIdentifierAsFieldName() { @@ -885,18 +885,18 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { public void testDeprecatedIsNullFunction() { expectError( "from test | eval x = is_null(f)", - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); expectError( "row x = is_null(f)", - "line 1:10: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:9: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); if (Build.current().isSnapshot()) { expectError( "from test | eval x = ?fn1(f)", List.of(paramAsIdentifier("fn1", "IS_NULL")), - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); } } @@ -911,23 +911,23 @@ public void testMetadataFieldOnOtherSources() { } public void testMetadataFieldMultipleDeclarations() { - expectError("from test metadata _index, _version, _index", "1:39: metadata field [_index] already declared [@1:20]"); + expectError("from test metadata _index, _version, _index", "1:38: metadata field [_index] already declared [@1:20]"); } public void testMetadataFieldUnsupportedPrimitiveType() { 
- expectError("from test metadata _tier", "line 1:21: unsupported metadata field [_tier]"); + expectError("from test metadata _tier", "line 1:20: unsupported metadata field [_tier]"); } public void testMetadataFieldUnsupportedCustomType() { - expectError("from test metadata _feature", "line 1:21: unsupported metadata field [_feature]"); + expectError("from test metadata _feature", "line 1:20: unsupported metadata field [_feature]"); } public void testMetadataFieldNotFoundNonExistent() { - expectError("from test metadata _doesnot_compute", "line 1:21: unsupported metadata field [_doesnot_compute]"); + expectError("from test metadata _doesnot_compute", "line 1:20: unsupported metadata field [_doesnot_compute]"); } public void testMetadataFieldNotFoundNormalField() { - expectError("from test metadata emp_no", "line 1:21: unsupported metadata field [emp_no]"); + expectError("from test metadata emp_no", "line 1:20: unsupported metadata field [emp_no]"); } public void testDissectPattern() { @@ -985,13 +985,13 @@ public void testGrokPattern() { expectError( "row a = \"foo bar\" | GROK a \"%{NUMBER:foo} %{WORD:foo}\"", - "line 1:22: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + "line 1:21: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + " the attribute [foo] is defined multiple times with different types" ); expectError( "row a = \"foo\" | GROK a \"(?P.+)\"", - "line 1:18: Invalid grok pattern [(?P.+)]: [undefined group option]" + "line 1:17: Invalid grok pattern [(?P.+)]: [undefined group option]" ); } @@ -1015,7 +1015,7 @@ public void testLikeRLike() { expectError( "from a | where foo like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\"", - "line 1:17: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "line 1:16: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]" ); } @@ -1076,7 +1076,7 @@ public void testEnrich() { ); expectError( "from a | enrich typo:countries on foo", - "line 1:18: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" + "line 1:17: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" ); } @@ -1261,8 +1261,8 @@ public void testInvalidPositionalParams() { expectError( "from test | where x < ?0 and y < ?2", List.of(paramAsConstant(null, 5)), - "line 1:24: No parameter is defined for position 0, did you mean position 1?; " - + "line 1:35: No parameter is defined for position 2, did you mean position 1?" + "line 1:23: No parameter is defined for position 0, did you mean position 1?; " + + "line 1:34: No parameter is defined for position 2, did you mean position 1?" 
); expectError( @@ -2050,7 +2050,7 @@ private void assertStringAsIndexPattern(String string, String statement) { private void assertStringAsLookupIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("line 1:14: LOOKUP is in preview and only available in SNAPSHOT build")); + assertThat(e.getMessage(), containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build")); return; } var plan = statement(statement); @@ -2107,18 +2107,18 @@ public void testEnrichOnMatchField() { } public void testInlineConvertWithNonexistentType() { - expectError("ROW 1::doesnotexist", "line 1:9: Unknown data type named [doesnotexist]"); - expectError("ROW \"1\"::doesnotexist", "line 1:11: Unknown data type named [doesnotexist]"); - expectError("ROW false::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); - expectError("ROW abs(1)::doesnotexist", "line 1:14: Unknown data type named [doesnotexist]"); - expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW 1::doesnotexist", "line 1:8: Unknown data type named [doesnotexist]"); + expectError("ROW \"1\"::doesnotexist", "line 1:10: Unknown data type named [doesnotexist]"); + expectError("ROW false::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); + expectError("ROW abs(1)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW (1+2)::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); } public void testLookup() { - String query = "ROW a = 1 | LOOKUP t ON j"; + String query = "ROW a = 1 | LOOKUP_🐔 t ON j"; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = statement(query); @@ -2131,7 +2131,7 @@ public void testLookup() { } public void testInlineConvertUnsupportedType() { - expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); + expectError("ROW 3::BYTE", "line 1:5: Unsupported conversion to type [BYTE]"); } public void testMetricsWithoutStats() { @@ -2300,7 +2300,6 @@ public void testMetricWithGroupKeyAsAgg() { } public void testMatchOperatorConstantQueryString() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); var plan = statement("FROM test | WHERE field:\"value\""); var filter = as(plan, Filter.class); var match = (Match) filter.condition(); @@ -2310,7 +2309,6 @@ public void testMatchOperatorConstantQueryString() { } public void testInvalidMatchOperator() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); expectError("from test | WHERE field:", "line 1:25: mismatched input '' expecting {QUOTED_STRING, "); expectError( "from test | WHERE field:CONCAT(\"hello\", \"world\")", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java index 6936c96a143d4..eea408914f4c5 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java @@ -8,13 +8,11 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelationSerializationTests; import java.util.ArrayList; @@ -32,12 +30,10 @@ public static LogicalPlan randomChild(int depth) { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.logical()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java index 6b17e4efd4de7..7c75ea623b34f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.List; @@ -27,7 +28,7 @@ protected Join createTestInstance() { } private static JoinConfig randomJoinConfig() { - JoinType type = randomFrom(JoinType.values()); + JoinType type = randomFrom(JoinTypes.LEFT, JoinTypes.RIGHT, JoinTypes.INNER, JoinTypes.FULL, JoinTypes.CROSS); List<Attribute> matchFields = randomFieldAttributes(1, 10, false); List<Attribute> leftFields = randomFieldAttributes(1, 10, false); List<Attribute> rightFields = randomFieldAttributes(1, 10, false); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java index dde70d85ba259..13887fbd1740c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java @@ 
-17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.util.ArrayList; import java.util.List; @@ -48,7 +48,7 @@ public void testExpressionsAndReferences() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); // matchfields are a subset of the left and right fields, so they don't contribute to the size of the references set. @@ -88,7 +88,7 @@ public void testTransformExprs() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); assertTrue(join.config().matchFields().stream().allMatch(ref -> ref.dataType().equals(DataType.INTEGER))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java index ccb27b41f2ed6..1f56150977d99 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.AbstractWireTestCase; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -81,6 +82,6 @@ protected boolean shouldBeSame(LocalSupplier newInstance) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java index 4b74114a0e01c..7689b80515880 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java @@ -9,16 +9,13 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import 
org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import java.util.ArrayList; import java.util.List; @@ -50,13 +47,10 @@ public static Integer randomEstimatedRowSize() { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); entries.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); // Query builders entries.add(Add.ENTRY); // Used by the eval tests return new NamedWriteableRegistry(entries); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java deleted file mode 100644 index 3dd44cd20e369..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
 - */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.LiteralTests; -import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.tree.SourceTests; - -import java.io.IOException; -import java.util.List; - -public class RowExecSerializationTests extends AbstractPhysicalPlanSerializationTests<RowExec> { - public static RowExec randomRowExec() { - Source source = randomSource(); - List<Alias> fields = randomList(1, 10, RowExecSerializationTests::randomAlias); - return new RowExec(source, fields); - } - - private static Alias randomAlias() { - Source source = SourceTests.randomSource(); - String name = randomAlphaOfLength(5); - Expression child = LiteralTests.randomLiteral(); - boolean synthetic = randomBoolean(); - return new Alias(source, name, child, new NameId(), synthetic); - } - - @Override - protected RowExec createTestInstance() { - return randomRowExec(); - } - - @Override - protected RowExec mutateInstance(RowExec instance) throws IOException { - List<Alias> fields = instance.fields(); - fields = randomValueOtherThan(fields, () -> randomList(1, 10, RowExecSerializationTests::randomAlias)); - return new RowExec(instance.source(), fields); - } - - @Override - protected boolean alwaysEmptySource() { - return true; - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index f60e5384e1a6f..ff9e45a9f9233 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -146,6 +146,7 @@ private LocalExecutionPlanner planner() throws IOException { null, null, null, + null, esPhysicalOperationProviders() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index cf90cf96fe683..57210fda07f2b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -46,7 +47,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java index 1f35bb5312b20..b010616cd7cf7 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.esql.Column; @@ -110,6 +110,6 @@ protected Configuration mutateInstance(Configuration in) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 116df21a33ac0..b323efad2b4c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.IndexMode; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -102,7 +103,7 @@ public void testFailedMetric() { return null; }).when(esqlClient).execute(eq(EsqlResolveFieldsAction.TYPE), any(), any()); - var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP); + var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP, new XPackLicenseState(() -> 0L)); var enrichResolver = mockEnrichResolver(); var request = new EsqlQueryRequest(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 5e6588d2295f9..eda906b147956 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.stats; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; @@ -205,7 +206,7 @@ public void testTwoWhereQuery() { public void testTwoQueriesExecuted() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -252,7 +253,7 @@ public void testTwoQueriesExecuted() { public void testMultipleFunctions() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier 
verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -526,7 +527,7 @@ private Counters esql(String esql, Verifier v) { Metrics metrics = null; if (v == null) { metrics = new Metrics(new EsqlFunctionRegistry()); - verifier = new Verifier(metrics); + verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); } analyzer(verifier).analyze(parser.createStatement(esql)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 82f0ebf316508..c1d94933537f0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -43,7 +43,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; @@ -436,8 +438,9 @@ public void accept(Page page) { } else if (argClass == Integer.class) { return randomInt(); } else if (argClass == JoinType.class) { - return JoinType.LEFT; + return JoinTypes.LEFT; } + if (Expression.class == argClass) { /* * Rather than use any old subclass of expression lets @@ -488,6 +491,15 @@ public void accept(Page page) { if (argClass == Configuration.class) { return randomConfiguration(); } + if (argClass == JoinConfig.class) { + return new JoinConfig( + JoinTypes.LEFT, + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()) + ); + } + try { return mock(argClass); } catch (MockitoException e) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java index f533c20975aff..987ab103cf80b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java @@ -10,14 +10,13 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.AbstractWireTestCase; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; @@ -92,9 +91,8 @@ protected MultiTypeEsField mutateInstance(MultiTypeEsField instance) throws IOEx @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(UnaryScalarFunction.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(ExpressionWritables.allExpressions()); + entries.addAll(ExpressionWritables.unaryScalars()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml index e74283bc873e3..ab150e41f310a 100644 --- a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml +++ b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml @@ -3,6 +3,12 @@ fleet.post_secret: body: '{"value": "test secret"}' - set: { id: id } + # search node needs to be available for fleet.get_secret to work in stateless. + # The `.fleet-secrets` index is created on demand, and its search replica starts out unassigned, + # so wait_for_no_uninitialized_shards can miss it. + - do: + cluster.health: + wait_for_active_shards: all - do: fleet.get_secret: id: $id diff --git a/x-pack/plugin/identity-provider/build.gradle b/x-pack/plugin/identity-provider/build.gradle index f3b0def7eee97..f9c121da0f550 100644 --- a/x-pack/plugin/identity-provider/build.gradle +++ b/x-pack/plugin/identity-provider/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle index b109c01181729..cbdb25825623d 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle index 256225c5ef3bf..8bc2967fc63de 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.info.BuildParams import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index d420ac9effdde..4cd41e58b11ac 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ilm/src/main/java/module-info.java b/x-pack/plugin/ilm/src/main/java/module-info.java index 591c9786247e6..aa24c2d6f333c 100644 --- a/x-pack/plugin/ilm/src/main/java/module-info.java +++ b/x-pack/plugin/ilm/src/main/java/module-info.java @@ -18,6 +18,4 @@ provides org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider with org.elasticsearch.xpack.ilm.ReservedLifecycleStateHandlerProvider; - - provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index f41524480e2df..f830a2821d841 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -146,7 +146,6 @@ public Collection createComponents(PluginServices services) { ILMHistoryTemplateRegistry ilmTemplateRegistry = new ILMHistoryTemplateRegistry( settings, services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java deleted file mode 100644 index cc78271e2d878..0000000000000 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.ilm; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry; - -import java.util.Map; - -public class IndexLifecycleFeatures implements FeatureSpecification { - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(ILMHistoryTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0); - } -} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java index 28c28ef6e4c55..5633033e6faa1 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java @@ -8,12 +8,9 @@ package org.elasticsearch.xpack.ilm.history; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -40,13 +37,11 @@ public class ILMHistoryTemplateRegistry extends IndexTemplateRegistry { // version 6: manage by data stream lifecycle // version 7: version the index template name so we can upgrade existing deployments public static final int INDEX_TEMPLATE_VERSION = 7; - public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("ilm-history-managed-by-dsl"); public static final String ILM_TEMPLATE_VERSION_VARIABLE = "xpack.ilm_history.template.version"; public static final String ILM_TEMPLATE_NAME = "ilm-history-" + INDEX_TEMPLATE_VERSION; public static final String ILM_POLICY_NAME = "ilm-history-ilm-policy"; - private final FeatureService featureService; @Override protected boolean requiresMasterNode() { @@ -58,13 +53,11 @@ protected boolean requiresMasterNode() { public ILMHistoryTemplateRegistry( Settings nodeSettings, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; this.ilmHistoryEnabled = LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings); } @@ -104,9 +97,4 @@ protected List<LifecyclePolicy> getLifecyclePolicies() { protected String getOrigin() { return ClientHelper.INDEX_LIFECYCLE_ORIGIN; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 1bf03ae25edd2..0000000000000 --- a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. -# - -org.elasticsearch.xpack.ilm.IndexLifecycleFeatures diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index cbdda089e8328..1797f6b10f3cb 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; @@ -40,7 +39,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -80,7 +78,6 @@ public void setup() { ILMHistoryTemplateRegistry registry = new ILMHistoryTemplateRegistry( clusterService.getSettings(), clusterService, - new FeatureService(List.of(new IndexLifecycleFeatures())), threadPool, client, NamedXContentRegistry.EMPTY diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 29d5add35ff49..3c19e11a450b4 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,7 +4,6 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 1fef26989d845..69767ce0b24f0 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; @@ -16,9 +19,12 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; @@ -47,7 +53,6 @@ public void tearDown() throws Exception { @SuppressWarnings("unchecked") public void testInferDeploysDefaultElser() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); assertDefaultElserConfig(model); @@ -78,7 +83,6 @@ private static void assertDefaultElserConfig(Map<String, Object> modelConfig) { @SuppressWarnings("unchecked") public void testInferDeploysDefaultE5() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_E5_ID); assertDefaultE5Config(model); @@ -110,4 +114,37 @@ private static void assertDefaultE5Config(Map<String, Object> modelConfig) { Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } + + public void testMultipleInferencesTriggeringDownloadAndDeploy() throws InterruptedException { + int numParallelRequests = 4; + var latch = new CountDownLatch(numParallelRequests); + var errors = new ArrayList<Exception>(); + + var listener = new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + errors.add(exception); + latch.countDown(); + } + }; + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + for (int i = 0; i < numParallelRequests; i++) { + var request = createInferenceRequest( + Strings.format("_inference/%s", ElasticsearchInternalService.DEFAULT_ELSER_ID), + inputs, + queryParams + ); + client().performRequestAsync(request, listener); + } + + latch.await(); + assertThat(errors.toString(), errors, empty()); + } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 6790b9bb14c5a..4e32ef99d06dd 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -373,12 +373,17 @@ protected Map<String, Object> infer(String modelId, TaskType taskType, List<String> - private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + protected Request createInferenceRequest(String endpoint, List<String> input, Map<String, String> queryParameters) { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input)); if (queryParameters.isEmpty() == false) { request.addParameters(queryParameters); } + return request; + } + + private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + var request = createInferenceRequest(endpoint, input, queryParameters); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 081c83b1e7067..f5773e73f2b22 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -44,18 +44,18 @@ public void testCRUD() throws IOException { } var getAllModels = getAllModels(); - int numModels = DefaultElserFeatureFlag.isEnabled() ? 11 : 9; + int numModels = 11; assertThat(getAllModels, hasSize(numModels)); var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); - int numSparseModels = DefaultElserFeatureFlag.isEnabled() ? 6 : 5; + int numSparseModels = 6; assertThat(getSparseModels, hasSize(numSparseModels)); for (var sparseModel : getSparseModels) { assertEquals("sparse_embedding", sparseModel.get("task_type")); } var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); - int numDenseModels = DefaultElserFeatureFlag.isEnabled() ? 
5 : 4; + int numDenseModels = 5; assertThat(getDenseModels, hasSize(numDenseModels)); for (var denseModel : getDenseModels) { assertEquals("text_embedding", denseModel.get("task_type")); @@ -134,7 +134,8 @@ public void testApisWithoutTaskType() throws IOException { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(18)); } else { assertThat(services.size(), equalTo(17)); @@ -169,7 +170,8 @@ public void testGetServicesWithoutTaskType() throws IOException { "watsonxai" ) ); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(6, "elastic"); } assertArrayEquals(providers, providerList.toArray()); @@ -257,7 +259,8 @@ public void testGetServicesWithCompletionTaskType() throws IOException { public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(5)); } else { assertThat(services.size(), equalTo(4)); @@ -272,7 +275,8 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { Arrays.sort(providers); var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(1, "elastic"); } assertArrayEquals(providers, providerList.toArray()); @@ -432,7 +436,7 @@ public void testUnsupportedStream() throws Exception { assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type")); try { - var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10))); + var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomUUID())); assertThat(events.size(), equalTo(2)); events.forEach(event -> { switch (event.name()) { @@ -457,7 +461,7 @@ public void testSupportedStream() throws Exception { assertEquals(modelId, singleModel.get("inference_id")); assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type")); - var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphaOfLength(10)).toList(); + var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomUUID()).toList(); try { var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input); diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index 
c05e71fa1cd55..0f8c732154e85 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -1,6 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle index bfaff7c84d9ad..214d775b46236 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle +++ b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle @@ -5,7 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 54d83af8f5d95..3b0fc869c8124 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -30,8 +31,10 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Locale; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; import static org.hamcrest.Matchers.equalTo; @@ -87,30 +90,38 @@ public void testBulkOperations() throws Exception { int totalBulkReqs = randomIntBetween(2, 100); long totalDocs = 0; + Set ids = new HashSet<>(); for (int bulkReqs = 0; bulkReqs < totalBulkReqs; bulkReqs++) { BulkRequestBuilder bulkReqBuilder = client().prepareBulk(); int totalBulkSize = randomIntBetween(1, 100); for (int bulkSize = 0; bulkSize < totalBulkSize; bulkSize++) { - String id = Long.toString(totalDocs); + if (ids.size() > 0 && rarely(random())) { + String id = randomFrom(ids); + ids.remove(id); + DeleteRequestBuilder request = new DeleteRequestBuilder(client(), INDEX_NAME).setId(id); + bulkReqBuilder.add(request); + continue; + } + String id = Long.toString(totalDocs++); boolean isIndexRequest = randomBoolean(); Map source = new HashMap<>(); source.put("sparse_field", isIndexRequest && rarely() ? null : randomSemanticTextInput()); source.put("dense_field", isIndexRequest && rarely() ? 
null : randomSemanticTextInput()); if (isIndexRequest) { bulkReqBuilder.add(new IndexRequestBuilder(client()).setIndex(INDEX_NAME).setId(id).setSource(source)); - totalDocs++; + ids.add(id); } else { boolean isUpsert = randomBoolean(); UpdateRequestBuilder request = new UpdateRequestBuilder(client()).setIndex(INDEX_NAME).setDoc(source); - if (isUpsert || totalDocs == 0) { + if (isUpsert || ids.size() == 0) { request.setDocAsUpsert(true); - totalDocs++; } else { // Update already existing document - id = Long.toString(randomLongBetween(0, totalDocs - 1)); + id = randomFrom(ids); } request.setId(id); bulkReqBuilder.add(request); + ids.add(id); } } BulkResponse bulkResponse = bulkReqBuilder.get(); @@ -135,7 +146,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) ids.size())); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java deleted file mode 100644 index 2a764dabd62ae..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
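The testBulkOperations change above keeps a Set of live document ids alongside the random index, update, and delete operations, so the final total-hits assertion can compare against ids.size() instead of a running counter that deletes would invalidate. Below is a self-contained sketch of that bookkeeping invariant, with a plain HashMap standing in for the index and all Elasticsearch client types omitted; the probabilities are crude stand-ins for rarely() and randomBoolean().

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

// Standalone model of the id bookkeeping in testBulkOperations: mirror every
// operation into a Set of live ids so "total hits == ids.size()" stays true
// even after deletes. The Map stands in for the Elasticsearch index.
public class BulkBookkeepingSketch {
    public static void main(String[] args) {
        Random random = new Random();
        Map<String, String> index = new HashMap<>(); // stand-in for the index
        Set<String> ids = new HashSet<>();           // expected set of live ids
        long totalDocs = 0;

        for (int op = 0; op < 1_000; op++) {
            if (ids.isEmpty() == false && random.nextInt(10) == 0) {
                // "rarely": delete an arbitrary live document and forget its id
                String id = ids.iterator().next();
                ids.remove(id);
                index.remove(id);
            } else if (random.nextBoolean() || ids.isEmpty()) {
                // index (or upsert) a brand-new document
                String id = Long.toString(totalDocs++);
                index.put(id, "doc-" + id);
                ids.add(id);
            } else {
                // update an existing document; the live set is unchanged
                String id = ids.iterator().next();
                index.put(id, "updated-" + id);
            }
        }

        // the invariant the integration test asserts via total hits
        if (index.size() != ids.size()) {
            throw new AssertionError(index.size() + " != " + ids.size());
        }
        System.out.println("live docs: " + ids.size() + " of " + totalDocs + " created");
    }
}
```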
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -public class DefaultElserFeatureFlag { - - private DefaultElserFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_default_elser"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 10ffedef14e26..c82f287792a7c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; -import java.util.HashSet; import java.util.Set; /** @@ -24,23 +23,24 @@ public class InferenceFeatures implements FeatureSpecification { @Override public Set getFeatures() { - var features = new HashSet(); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED); - features.add(RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED); - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID); - features.add(SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED); - if (DefaultElserFeatureFlag.isEnabled()) { - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2); - } - return Set.copyOf(features); + return Set.of( + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, + RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, + SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, + SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, + SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2, + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED + ); } @Override public Set getTestFeatures() { return Set.of( SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 02bddb6076d69..2320cca8295d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandRerankTaskSettings; +import 
org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserMlNodeTaskSettings; @@ -415,7 +416,13 @@ private static void addInternalNamedWriteables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index f068caff805af..48458bf4f5086 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -91,7 +93,6 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; @@ -113,6 +114,9 @@ import java.util.stream.Stream; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin, SearchPlugin { @@ -135,11 +139,13 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; + private static final Logger log = LogManager.getLogger(InferencePlugin.class); + private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); - private final SetOnce eisComponents = new SetOnce<>(); + private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; @@ -207,12 +213,35 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if 
(ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - ElasticInferenceServiceSettings eisSettings = new ElasticInferenceServiceSettings(settings); - eisComponents.set(new ElasticInferenceServiceComponents(eisSettings.getEisGatewayUrl())); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } + + if (elasticInferenceUrl != null) { + elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( - () -> List.of(context -> new ElasticInferenceService(httpFactory.get(), serviceComponents.get(), eisComponents.get())) + () -> List.of( + context -> new ElasticInferenceService( + httpFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponents.get() + ) + ) ); } @@ -227,10 +256,8 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); - if (DefaultElserFeatureFlag.isEnabled()) { - for (var service : registry.getServices().values()) { - service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); - } + for (var service : registry.getServices().values()) { + service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } inferenceServiceRegistry.set(registry); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index b3bbe3a7df9bc..dd59230e575c4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -413,8 +413,8 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons */ private Map> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { Map> fieldRequestsMap = new LinkedHashMap<>(); - int itemIndex = 0; - for (var item : bulkShardRequest.items()) { + for (int itemIndex = 0; itemIndex < bulkShardRequest.items().length; itemIndex++) { + var item = bulkShardRequest.items()[itemIndex]; if (item.getPrimaryResponse() != null) { // item was already aborted/processed by a filter in the chain upstream (e.g.
security) continue; @@ -441,6 +441,7 @@ private Map> createFieldInferenceRequests(Bu // ignore delete request continue; } + final Map docMap = indexRequest.sourceAsMap(); for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); @@ -483,7 +484,6 @@ private Map> createFieldInferenceRequests(Bu } } } - itemIndex++; } return fieldRequestsMap; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java index bbc5082d45004..b74e473155aec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -28,6 +28,14 @@ * * By setting the accumulated tokens limit to a value greater than zero, it effectively allows bursts of traffic. If the accumulated * tokens limit is set to zero, it will force the acquiring thread to wait on each call. + * + * Example: + * Time unit: Second + * Tokens to produce per time unit: 10 + * Limit for tokens in bucket: 100 + * + * Tokens in bucket after n seconds (n second -> tokens in bucket): + * 1 sec -> 10 tokens, 2 sec -> 20 tokens, ... , 10 sec -> 100 tokens (bucket full), ... 200 sec -> 100 tokens (no increase in tokens) */ public class RateLimiter { @@ -76,6 +84,7 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double throw new IllegalArgumentException(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE)); } + // If the new token limit is smaller than what we've accumulated already, we need to drop tokens to meet the new token limit accumulatedTokens = Math.min(accumulatedTokens, newAccumulatedTokensLimit); accumulatedTokensLimit = newAccumulatedTokensLimit; @@ -88,7 +97,8 @@ } /** - * Causes the thread to wait until the tokens are available + * Causes the thread to wait until the tokens are available. + * This reserves tokens in advance, leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here + * @throws InterruptedException _ */ @@ -130,6 +140,7 @@ private static void validateTokenRequest(int tokens) { /** * Returns the amount of time to wait for the tokens to become available. + * This reserves tokens in advance, leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here. Must be greater than 0.
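The RateLimiter javadoc above works through a bucket that refills at 10 tokens per second up to a cap of 100, with acquire reserving tokens in advance. Below is a minimal token-bucket sketch of exactly those semantics; the class and field names are invented for illustration, and it is not the Elasticsearch RateLimiter, whose constructor, clock handling, and locking differ.

```java
import java.util.concurrent.TimeUnit;

// Minimal token-bucket illustration of the documented semantics: tokens accrue
// at a fixed rate up to a hard cap, and acquire() reserves tokens in advance,
// sleeping when the bucket runs dry (the balance may go negative).
class TokenBucketSketch {
    private final double capacity;       // "accumulated tokens limit"
    private final double tokensPerNano;  // refill rate
    private double tokens;
    private long lastRefillNanos;

    TokenBucketSketch(double capacity, double tokensPerTimeUnit, TimeUnit unit) {
        this.capacity = capacity;
        this.tokensPerNano = tokensPerTimeUnit / unit.toNanos(1);
        this.lastRefillNanos = System.nanoTime();
    }

    synchronized void acquire(int requested) throws InterruptedException {
        refill();
        tokens -= requested; // reserve in advance; a negative balance means we owe time
        if (tokens < 0) {
            TimeUnit.NANOSECONDS.sleep((long) (-tokens / tokensPerNano));
        }
    }

    private void refill() {
        long now = System.nanoTime();
        // accrue tokens for the elapsed time, but never beyond the cap
        tokens = Math.min(capacity, tokens + (now - lastRefillNanos) * tokensPerNano);
        lastRefillNanos = now;
    }

    public static void main(String[] args) throws InterruptedException {
        // 10 tokens per second, bucket capped at 100, as in the javadoc example
        TokenBucketSketch bucket = new TokenBucketSketch(100, 10, TimeUnit.SECONDS);
        for (int i = 0; i < 5; i++) {
            bucket.acquire(50); // bursts drain the bucket; later calls wait ~5s each
            System.out.println("acquired 50 tokens at " + System.nanoTime());
        }
    }
}
```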
* @return the amount of time to wait */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java index c8ada6e535b63..fa096901ed67a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java @@ -15,9 +15,11 @@ import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModel; import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceActionCreator implements ElasticInferenceServiceActionVisitor { @@ -36,7 +38,10 @@ public ElasticInferenceServiceActionCreator(Sender sender, ServiceComponents ser @Override public ExecutableAction create(ElasticInferenceServiceSparseEmbeddingsModel model) { var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext); - var errorMessage = constructFailedToSendRequestMessage(model.uri(), "Elastic Inference Service sparse embeddings"); + var errorMessage = constructFailedToSendRequestMessage( + model.uri(), + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER) + ); return new SenderExecutableAction(sender, requestManager, errorMessage); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java index e7ee41525f07d..bf3409888aaf8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java @@ -22,9 +22,11 @@ import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends ElasticInferenceServiceRequestManager { @@ -40,7 +42,7 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends Elast private static ResponseHandler createSparseEmbeddingsHandler() { return new ElasticInferenceServiceResponseHandler( - "Elastic Inference Service sparse embeddings", + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), ElasticInferenceServiceSparseEmbeddingsResponseEntity::fromResponse ); } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java index 2b36cc5d22cd4..42ca45f75a9c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java @@ -33,7 +33,7 @@ public class ElasticInferenceServiceSparseEmbeddingsResponseEntity { "Failed to find required field [%s] in Elastic Inference Service embeddings response"; /** - * Parses the EIS json response. + * Parses the Elastic Inference Service json response. * * For a request like: * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java index 24946ee5875a5..78673277797d2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java @@ -30,6 +30,8 @@ public class GoogleVertexAiRerankResponseEntity { private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Google Vertex AI rerank response"; + private static final String INVALID_ID_FIELD_FORMAT_TEMPLATE = "Expected numeric value for record ID field in Google Vertex AI rerank " + + "response but received [%s]"; /** * Parses the Google Vertex AI rerank response. 
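The hunk that follows makes the rerank record id a required field and converts it defensively, so a malformed payload surfaces as a descriptive IllegalStateException rather than a bare NumberFormatException. Here is a standalone sketch of that guard pattern; the class name and message templates are illustrative, not the Elasticsearch parser itself.

```java
import java.util.Locale;

// Sketch of the required-field-plus-parse guard applied to the rerank id:
// reject a missing value first, then convert with a descriptive error.
final class RequiredIdParser {
    private static final String MISSING_TEMPLATE = "Failed to find required field [%s] in rerank response";
    private static final String INVALID_TEMPLATE = "Expected numeric value for record ID field but received [%s]";

    static int parseRecordId(String rawId) {
        if (rawId == null) {
            throw new IllegalStateException(String.format(Locale.ROOT, MISSING_TEMPLATE, "id"));
        }
        try {
            return Integer.parseInt(rawId);
        } catch (NumberFormatException e) {
            throw new IllegalStateException(String.format(Locale.ROOT, INVALID_TEMPLATE, rawId));
        }
    }

    public static void main(String[] args) {
        System.out.println(parseRecordId("3"));   // -> 3
        System.out.println(parseRecordId("3.5")); // -> IllegalStateException with a readable message
    }
}
```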
@@ -109,14 +111,27 @@ private static List doParse(XContentParser parser) throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.SCORE.getPreferredName())); } - return new RankedDocsResults.RankedDoc(index, parsedRankedDoc.score, parsedRankedDoc.content); + if (parsedRankedDoc.id == null) { + throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.ID.getPreferredName())); + } + + try { + return new RankedDocsResults.RankedDoc( + Integer.parseInt(parsedRankedDoc.id), + parsedRankedDoc.score, + parsedRankedDoc.content + ); + } catch (NumberFormatException e) { + throw new IllegalStateException(format(INVALID_ID_FIELD_FORMAT_TEMPLATE, parsedRankedDoc.id)); + } }); } - private record RankedDoc(@Nullable Float score, @Nullable String content) { + private record RankedDoc(@Nullable Float score, @Nullable String content, @Nullable String id) { private static final ParseField CONTENT = new ParseField("content"); private static final ParseField SCORE = new ParseField("score"); + private static final ParseField ID = new ParseField("id"); private static final ObjectParser PARSER = new ObjectParser<>( "google_vertex_ai_rerank_response", true, @@ -126,6 +141,7 @@ private record RankedDoc(@Nullable Float score, @Nullable String content) { static { PARSER.declareString(Builder::setContent, CONTENT); PARSER.declareFloat(Builder::setScore, SCORE); + PARSER.declareString(Builder::setId, ID); } public static RankedDoc parse(XContentParser parser) { @@ -137,6 +153,7 @@ private static final class Builder { private String content; private Float score; + private String id; private Builder() {} @@ -150,8 +167,13 @@ public Builder setContent(String content) { return this; } + public Builder setId(String id) { + this.id = id; + return this; + } + public RankedDoc build() { - return new RankedDoc(score, content); + return new RankedDoc(score, content, id); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index f0cb612c9082f..3744bf2a6dbed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -57,7 +57,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import java.io.IOException; import java.util.ArrayList; @@ -70,6 +69,7 @@ import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; @@ -90,8 +90,12 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2"); public static 
final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); - public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); + public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); + public static final NodeFeature SEMANTIC_TEXT_ZERO_SIZE_FIX = new NodeFeature("semantic_text.zero_size_fix"); + public static final NodeFeature SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX = new NodeFeature( + "semantic_text.always_emit_inference_id_fix" + ); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -111,18 +115,14 @@ public static class Builder extends FieldMapper.Builder { INFERENCE_ID_FIELD, false, mapper -> ((SemanticTextFieldType) mapper.fieldType()).inferenceId, - DefaultElserFeatureFlag.isEnabled() ? DEFAULT_ELSER_2_INFERENCE_ID : null + DEFAULT_ELSER_2_INFERENCE_ID ).addValidator(v -> { if (Strings.isEmpty(v)) { - // If the default ELSER feature flag is enabled, the only way we get here is if the user explicitly sets the param to an - // empty value. However, if the feature flag is disabled, we can get here if the user didn't set the param. - // Adjust the error message appropriately. - String message = DefaultElserFeatureFlag.isEnabled() - ? "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" - : "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must be specified"; - throw new IllegalArgumentException(message); + throw new IllegalArgumentException( + "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" + ); } - }); + }).alwaysSerialize(); private final Parameter searchInferenceId = Parameter.stringParam( SEARCH_INFERENCE_ID_FIELD, @@ -512,7 +512,7 @@ public boolean fieldHasValue(FieldInfos fieldInfos) { return fieldInfos.fieldInfo(getEmbeddingsFieldName(name())) != null; } - public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost, String queryName) { + public QueryBuilder semanticQuery(InferenceResults inferenceResults, Integer requestSize, float boost, String queryName) { String nestedFieldPath = getChunksFieldName(name()); String inferenceResultsFieldName = getEmbeddingsFieldName(name()); QueryBuilder childQueryBuilder; @@ -556,7 +556,13 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost ); } - yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, null, null, null); + Integer k = requestSize; + if (k != null) { + // Ensure that k is at least the default size so that aggregations work when size is set to 0 in the request + k = Math.max(k, DEFAULT_SIZE); + } + + yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, k, null, null); } default -> throw new IllegalStateException( "Field [" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 478f2e6a21868..d648db2fbfdbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -170,7 +170,7 @@ private 
QueryBuilder doRewriteBuildSemanticQuery(SearchExecutionContext searchEx ); } - return semanticTextFieldType.semanticQuery(inferenceResults, boost(), queryName()); + return semanticTextFieldType.semanticQuery(inferenceResults, searchExecutionContext.requestSize(), boost(), queryName()); } else { throw new IllegalArgumentException( "Field [" + fieldName + "] of type [" + fieldType.typeName() + "] does not support " + NAME + " queries" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 967ad4b46dcb3..83b2a8a0f5182 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -15,10 +15,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -69,11 +66,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient @Override public Set supportedCapabilities() { - Set capabilities = new HashSet<>(); - if (DefaultElserFeatureFlag.isEnabled()) { - capabilities.add(DEFAULT_ELSER_2_CAPABILITY); - } - - return Collections.unmodifiableSet(capabilities); + return Set.of(DEFAULT_ELSER_2_CAPABILITY); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 98429ed3d001d..e7ce5903163d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -62,6 +62,7 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; + public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; @@ -70,10 +71,10 @@ public class ElasticInferenceService extends SenderService { public ElasticInferenceService( HttpRequestSender.Factory factory, ServiceComponents serviceComponents, - ElasticInferenceServiceComponents eisComponents + ElasticInferenceServiceComponents elasticInferenceServiceComponents ) { super(factory, serviceComponents); - this.elasticInferenceServiceComponents = eisComponents; + this.elasticInferenceServiceComponents = elasticInferenceServiceComponents; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java index 4386964e927d2..c5b2cb693df13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java @@ -7,4 +7,4 @@ package org.elasticsearch.xpack.inference.services.elastic; -public record ElasticInferenceServiceComponents(String eisGatewayUrl) {} +public record ElasticInferenceServiceComponents(String elasticInferenceServiceUrl) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index b0fb6d14ee6f7..324c20d0e48bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -15,6 +15,8 @@ */ public class ElasticInferenceServiceFeature { - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + @Deprecated + public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 8525710c6cf23..bc2daddc2a346 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -14,20 +14,37 @@ public class ElasticInferenceServiceSettings { + @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( + "xpack.inference.elastic.url", + Setting.Property.NodeScope + ); + // Adjust this variable to be volatile, if the setting can be updated at some point in time + @Deprecated private final String eisGatewayUrl; + private final String elasticInferenceServiceUrl; + public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); + elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); + } public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL); + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } + @Deprecated public String getEisGatewayUrl() { return eisGatewayUrl; } + + public String getElasticInferenceServiceUrl() { + return elasticInferenceServiceUrl; + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index bbbae736dbeb9..cc69df86933de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -22,8 +22,11 @@ import java.net.URI; import java.net.URISyntaxException; +import java.util.Locale; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; + public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceModel { private final URI uri; @@ -105,9 +108,13 @@ private URI createUri() throws URISyntaxException { switch (modelId) { case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2"; - default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); + default -> throw new IllegalArgumentException( + String.format(Locale.ROOT, "Unsupported model for %s [%s]", ELASTIC_INFERENCE_SERVICE_IDENTIFIER, modelId) + ); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); + return new URI( + elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath + ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 922b366498c27..f743b94df3810 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import java.io.IOException; @@ -157,6 +156,8 @@ protected void putModel(Model model, ActionListener listener) { putBuiltInModel(e5Model.getServiceSettings().modelId(), listener); } else if (model instanceof ElserInternalModel elserModel) { putBuiltInModel(elserModel.getServiceSettings().modelId(), listener); + } else if (model instanceof ElasticRerankerModel elasticRerankerModel) { + putBuiltInModel(elasticRerankerModel.getServiceSettings().modelId(), listener); } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); @@ -296,11 +297,6 @@ protected void maybeStartDeployment( InferModelAction.Request request, ActionListener listener ) { - if (DefaultElserFeatureFlag.isEnabled() == false) { - listener.onFailure(e); - return; - } - if (isDefaultId(model.getInferenceEntityId()) && ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { this.start(model, request.getInferenceTimeout(), listener.delegateFailureAndWrap((l, started) -> { client.execute(InferModelAction.INSTANCE, request, listener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index b710b24cbda31..b76de5eeedbfc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class CustomElandModel extends ElasticsearchInternalModel { @@ -39,31 +34,10 @@ public CustomElandModel( } @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the inference as the custom eland model [{0}] for this platform cannot be found." - + " Custom models need to be loaded into the cluster with eland before they can be started.", - internalServiceSettings.modelId() - ) - ); - return; - } - listener.onFailure(e); - } - }; + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + + modelId + + "] as the model cannot be found." 
+ + " Custom models need to be loaded into the cluster with Eland before they can be started."; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java index 724c7a8f0a166..ce6c6258d0393 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java @@ -36,6 +36,11 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA throw new IllegalStateException("cannot start model that uses an existing deployment"); } + @Override + protected String modelNotFoundErrorMessage(String modelId) { + throw new IllegalStateException("cannot start model [" + modelId + "] that uses an existing deployment"); + } + @Override public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java new file mode 100644 index 0000000000000..115cc9f05599a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.ChunkingSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +public class ElasticRerankerModel extends ElasticsearchInternalModel { + + public ElasticRerankerModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticRerankerServiceSettings serviceSettings, + ChunkingSettings chunkingSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, chunkingSettings); + } + + @Override + public ElasticRerankerServiceSettings getServiceSettings() { + return (ElasticRerankerServiceSettings) super.getServiceSettings(); + } + + @Override + public ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ) { + + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + listener.onFailure( + new ResourceNotFoundException("Could not start the Elastic Reranker Endpoint due to [{}]", e, e.getMessage()) + ); + return; + } + listener.onFailure(e); + } + }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java new file mode 100644 index 0000000000000..316dc092e03c7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; + +import java.io.IOException; +import java.util.Map; + +public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { + + public static final String NAME = "elastic_reranker_service_settings"; + + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { + super(other); + } + + public ElasticRerankerServiceSettings( + Integer numAllocations, + int numThreads, + String modelId, + AdaptiveAllocationsSettings adaptiveAllocationsSettings + ) { + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + } + + public ElasticRerankerServiceSettings(StreamInput in) throws IOException { + super(in); + } + + /** + * Parse the ElasticRerankerServiceSettings from the map and validate the setting values. + * + * If required settings are missing or the values are invalid, a + * {@link ValidationException} is thrown.
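fromRequestMap below relies on the accumulate-then-throw validation idiom: every problem found while reading the map is recorded, and a single exception carrying all of them is thrown at the end, rather than failing on the first error. Here is a simplified sketch of that idiom; the setting keys and the ValidationException stand-in are invented for illustration (the real code uses org.elasticsearch.common.ValidationException).

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Sketch of accumulate-then-throw validation when parsing settings from a map.
final class SettingsValidationSketch {
    static class ValidationException extends RuntimeException {
        final List<String> errors = new ArrayList<>();
        @Override
        public String getMessage() { return "Validation failed: " + errors; }
    }

    record Settings(int numThreads, String modelId) {}

    static Settings fromRequestMap(Map<String, Object> map) {
        ValidationException validation = new ValidationException();

        Object numThreads = map.get("num_threads");
        if (numThreads instanceof Integer == false) {
            validation.errors.add("[num_threads] must be an integer");
        }
        Object modelId = map.get("model_id");
        if (modelId instanceof String == false) {
            validation.errors.add("[model_id] must be a string");
        }

        // Throw once, carrying every recorded error
        if (validation.errors.isEmpty() == false) {
            throw validation;
        }
        return new Settings((Integer) numThreads, (String) modelId);
    }

    public static void main(String[] args) {
        System.out.println(fromRequestMap(Map.of("num_threads", 2, "model_id", ".rerank-v1")));
        fromRequestMap(Map.of()); // throws, listing both problems at once
    }
}
```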
+ * + * @param map Source map containing the config + * @return The builder + */ + public static Builder fromRequestMap(Map map) { + ValidationException validationException = new ValidationException(); + var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return baseSettings; + } + + @Override + public String getWriteableName() { + return ElasticRerankerServiceSettings.NAME; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 2405243f302bc..aa12bf0c645c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.TimeValue; @@ -15,8 +18,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; @@ -79,10 +84,41 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA return startRequest; } - public abstract ActionListener getCreateTrainedModelAssignmentActionListener( + public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, ActionListener listener - ); + ) { + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + var cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof ResourceNotFoundException) { + listener.onFailure(new ResourceNotFoundException(modelNotFoundErrorMessage(internalServiceSettings.modelId()))); + return; + } else if (cause instanceof ElasticsearchStatusException statusException) { + if (statusException.status() == RestStatus.CONFLICT + && statusException.getRootCause() instanceof ResourceAlreadyExistsException) { + // Deployment is already started + listener.onResponse(Boolean.TRUE); + } else { + // Surface any other status exception instead of silently dropping it + listener.onFailure(e); + } + return; + } + listener.onFailure(e); + } + }; + } + + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + modelId + "] as the model cannot be found."; + } public boolean usesExistingDeployment() { return internalServiceSettings.getDeploymentId() != null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index fe83acc8574aa..718aeae979fe9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -97,6 +97,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 ); + public static final String RERANKER_ID = ".rerank-v1"; + public static final int EMBEDDING_MAX_BATCH_SIZE = 10; public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch"; public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch"; @@ -223,6 +225,8 @@ public void parseRequestConfig( ) ) ); + } else if (RERANKER_ID.equals(modelId)) { + rerankerCase(inferenceEntityId, taskType, config, serviceSettingsMap, chunkingSettings, modelListener); } else { customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, chunkingSettings, modelListener); } @@ -323,6 +327,31 @@ private static CustomElandInternalServiceSettings elandServiceSettings( }; } + private void rerankerCase( + String inferenceEntityId, + TaskType taskType, + Map config, + Map serviceSettingsMap, + ChunkingSettings chunkingSettings, + ActionListener modelListener + ) { + + var esServiceSettingsBuilder = ElasticsearchInternalServiceSettings.fromRequestMap(serviceSettingsMap); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + modelListener.onResponse( + new ElasticRerankerModel( + inferenceEntityId, + taskType, + NAME, + new ElasticRerankerServiceSettings(esServiceSettingsBuilder.build()), + chunkingSettings + ) + ); + } + private void e5Case( String inferenceEntityId, TaskType taskType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java index 8d2f59171a601..2594f18db3fb5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class ElserInternalModel extends ElasticsearchInternalModel { @@ -37,31 +32,4 @@ public ElserInternalServiceSettings getServiceSettings() { public ElserMlNodeTaskSettings getTaskSettings() { return (ElserMlNodeTaskSettings) super.getTaskSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void 
onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the ELSER service as the ELSER model for this platform cannot be found." - + " ELSER needs to be downloaded before it can be started." - ) - ); - return; - } - listener.onFailure(e); - } - }; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java index fee00d04d940b..2dcf91140c995 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class MultilingualE5SmallModel extends ElasticsearchInternalModel { @@ -31,34 +26,4 @@ public MultilingualE5SmallModel( public MultilingualE5SmallInternalServiceSettings getServiceSettings() { return (MultilingualE5SmallInternalServiceSettings) super.getServiceSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the TextEmbeddingService service as the " - + "Multilingual-E5-Small model for this platform cannot be found." 
- + " Multilingual-E5-Small needs to be downloaded before it can be started" - ) - ); - return; - } - listener.onFailure(e); - } - }; - } - } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index e960b0b777f2b..f4f4605c667c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -30,6 +31,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -86,11 +88,19 @@ public void parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + IbmWatsonxModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -112,6 +122,7 @@ private static IbmWatsonxModel createModel( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -123,6 +134,7 @@ private static IbmWatsonxModel createModel( NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -141,11 +153,17 @@ public IbmWatsonxModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -166,6 +184,7 @@ private static IbmWatsonxModel createModelFromPersistent( TaskType taskType, Map 
serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secretSettings, String failureMessage ) { @@ -174,6 +193,7 @@ private static IbmWatsonxModel createModelFromPersistent( taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -185,11 +205,17 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -266,7 +292,8 @@ protected void doChunkedInfer( var batchedRequests = new EmbeddingRequestChunker( input.getInputs(), EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT + EmbeddingRequestChunker.EmbeddingType.FLOAT, + model.getConfigurations().getChunkingSettings() ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = ibmWatsonxModel.accept(getActionCreator(getSender(), getServiceComponents()), taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java index d60e31b5d41c0..6b20e07ecc0a2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java @@ -9,6 +9,7 @@ import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; @@ -40,6 +41,7 @@ public IbmWatsonxEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secrets, ConfigurationParseContext context ) { @@ -49,6 +51,7 @@ public IbmWatsonxEmbeddingsModel( service, IbmWatsonxEmbeddingsServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, + chunkingSettings, DefaultSecretSettings.fromMap(secrets) ); } @@ -64,10 +67,11 @@ public IbmWatsonxEmbeddingsModel(IbmWatsonxEmbeddingsModel model, IbmWatsonxEmbe String service, IbmWatsonxEmbeddingsServiceSettings serviceSettings, TaskSettings taskSettings, + ChunkingSettings chunkingsettings, @Nullable DefaultSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingsettings), new ModelSecrets(secrets), serviceSettings ); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java index 32450e3facfd0..7ff79e2618425 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java @@ -39,7 +39,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2")))); + assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2")))); } public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { @@ -68,7 +68,7 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException assertThat( parsedResults.getRankedDocs(), - is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1"))) + is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1"))) ); } @@ -161,4 +161,37 @@ public void testFromResponse_FailsWhenScoreFieldIsNotPresent() { assertThat(thrownException.getMessage(), is("Failed to find required field [score] in Google Vertex AI rerank response")); } + + public void testFromResponse_FailsWhenIDFieldIsNotInteger() { + String responseJson = """ + { + "records": [ + { + "id": "abcd", + "title": "title 2", + "content": "content 2", + "score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "content": "content 1", + "score": 0.96 + } + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Expected numeric value for record ID field in Google Vertex AI rerank response but received [abcd]") + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index f444719c730f5..71ff9fc7d84cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.model.TestModel; import org.junit.AssumptionViolatedException; @@ -103,9 +102,6 @@ protected Collection getPlugins() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { b.field("type", "semantic_text"); 
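A side note on the mapper-test changes that follow: with DefaultElserFeatureFlag removed, a semantic_text field mapped without an explicit inference_id is now always serialized with the default endpoint id. A minimal sketch of the expected shape (plain Java, not the mapper API; the literal id is an assumption mirroring DEFAULT_ELSER_2_INFERENCE_ID):

    import java.util.Map;

    class DefaultInferenceIdSketch {
        // Input mapping:    { "type": "semantic_text" }
        // Expected output once the mapper fills in the default endpoint id:
        static final Map<String, String> EXPECTED_MINIMAL_MAPPING = Map.of(
            "type", "semantic_text",
            "inference_id", ".elser-2-elasticsearch" // assumed value of DEFAULT_ELSER_2_INFERENCE_ID
        );
    }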
- if (DefaultElserFeatureFlag.isEnabled() == false) { - b.field("inference_id", "test_model"); - } } @Override @@ -113,6 +109,12 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { return "cannot have nested fields when index is in [index.mode=time_series]"; } + @Override + protected void metaMapping(XContentBuilder b) throws IOException { + super.metaMapping(b); + b.field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID); + } + @Override protected Object getSampleValueForDocument() { return null; @@ -170,14 +172,13 @@ protected void assertSearchable(MappedFieldType fieldType) { public void testDefaults() throws Exception { final String fieldName = "field"; final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping); + final XContentBuilder expectedMapping = fieldMapping(this::metaMapping); MapperService mapperService = createMapperService(fieldMapping); DocumentMapper mapper = mapperService.documentMapper(); - assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); + assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); - if (DefaultElserFeatureFlag.isEnabled()) { - assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); - } + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); ParsedDocument doc1 = mapper.parse(source(this::writeField)); List fields = doc1.rootDoc().getFields("field"); @@ -211,15 +212,18 @@ public void testSetInferenceEndpoints() throws IOException { assertSerialization.accept(fieldMapping, mapperService); } { - if (DefaultElserFeatureFlag.isEnabled()) { - final XContentBuilder fieldMapping = fieldMapping( - b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) - ); - final MapperService mapperService = createMapperService(fieldMapping); - assertSemanticTextField(mapperService, fieldName, false); - assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); - assertSerialization.accept(fieldMapping, mapperService); - } + final XContentBuilder fieldMapping = fieldMapping( + b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + final XContentBuilder expectedMapping = fieldMapping( + b -> b.field("type", "semantic_text") + .field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID) + .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + final MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); + assertSerialization.accept(expectedMapping, mapperService); } { final XContentBuilder fieldMapping = fieldMapping( @@ -246,26 +250,18 @@ public void testInvalidInferenceEndpoints() { ); } { - final String expectedMessage = DefaultElserFeatureFlag.isEnabled() - ? 
"[inference_id] on mapper [field] of type [semantic_text] must not be empty" - : "[inference_id] on mapper [field] of type [semantic_text] must be specified"; Exception e = expectThrows( MapperParsingException.class, () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, ""))) ); - assertThat(e.getMessage(), containsString(expectedMessage)); + assertThat(e.getMessage(), containsString("[inference_id] on mapper [field] of type [semantic_text] must not be empty")); } { - if (DefaultElserFeatureFlag.isEnabled()) { - Exception e = expectThrows( - MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) - ); - assertThat( - e.getMessage(), - containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty") - ); - } + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) + ); + assertThat(e.getMessage(), containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index d6c491f2b7cec..f7f37c5bcd15f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -69,6 +70,8 @@ import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -124,6 +127,7 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); service.parseRequestConfig( @@ -150,6 +154,45 @@ public void 
testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createIbmWatsonxService()) { var failureListener = getModelListenerForException( @@ -235,6 +278,47 @@ public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsMode assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); } } @@ -399,6 +483,73 @@ public void 
testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists } } + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotIbmWatsonxModel() throws IOException { var sender = mock(Sender.class); @@ -488,7 +639,15 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { } } - public void testChunkedInfer_Batches() throws IOException { + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { + testChunkedInfer_Batches(null); + } + + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { + testChunkedInfer_Batches(createRandomChunkingSettings()); + } + + private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws IOException { var input = List.of("foo", "bar"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -878,6 +1037,18 @@ private static ActionListener getModelListenerForException(Class excep }); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = 
getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java index 93fd7e402a0de..33fcd752fbf30 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java @@ -82,6 +82,7 @@ public static IbmWatsonxEmbeddingsModel createModel( null ), EmptyTaskSettings.INSTANCE, + null, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index fe406722ae1e2..8d8ad94d608d7 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -36,7 +36,7 @@ protected Settings restClientSettings() { var baseSettings = super.restClientSettings(); return Settings.builder() .put(baseSettings) - .put(CLIENT_SOCKET_TIMEOUT, "120s") // Long timeout for model download + .put(CLIENT_SOCKET_TIMEOUT, "300s") // Long timeout for model download .build(); } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index 71fb1fd95989f..882f1df03e926 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -371,3 +371,29 @@ setup: - match: { error.type: illegal_argument_exception } - match: { error.reason: "semantic_text field [level_1.level_2.sparse_field] cannot be in an object field with subobjects disabled" } + +--- +"Mapping always includes inference ID": + - requires: + cluster_features: "semantic_text.always_emit_inference_id_fix" + reason: always emit inference ID fix added in 8.17.0 + test_runner_features: [capabilities] + capabilities: + - method: GET + path: /_inference + capabilities: [default_elser_2] + + - do: + indices.create: + index: test-always-include-inference-id-index + body: + mappings: + properties: + semantic_field: + type: semantic_text + + - do: + indices.get_mapping: + index: test-always-include-inference-id-index + + - exists: test-always-include-inference-id-index.mappings.properties.semantic_field.inference_id diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 445df1dc302b9..534e4831c4a0a 100644 --- 
a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -551,7 +551,7 @@ setup: --- "Calculates embeddings using the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: [capabilities] capabilities: - method: GET @@ -624,3 +624,55 @@ setup: - match: { _source.level_1.dense_field.text: "another inference test" } - exists: _source.level_1.dense_field.inference.chunks.0.embeddings - match: { _source.level_1.dense_field.inference.chunks.0.text: "another inference test" } + +--- +"Deletes on bulk operation": + - requires: + cluster_features: semantic_text.delete_fix + reason: Delete operations are properly applied when subsequent operations include a semantic text field. + + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{"_id": "1"}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{"_id": "2"}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: ["you know, for testing", "now with chunks"] } + - match: { hits.hits.1._source.dense_field.text: ["some more tests", "that include chunks"] } + + - do: + bulk: + index: test-index + refresh: true + body: | + {"delete":{ "_id": "2"}} + {"update":{"_id": "1"}} + {"doc":{"dense_field": "updated text"}} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: "updated text" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 17938f3b61a41..c2704a4c22914 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -843,7 +843,7 @@ setup: --- "Query a field that uses the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: [capabilities] capabilities: - method: GET @@ -878,3 +878,117 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } + +--- +"Query using a sparse embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-sparse-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: test-sparse-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: 
test-sparse-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-sparse-index-with-agg-id + body: + size: 0 + query: + semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } + +--- +"Query using a dense embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-dense-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: test-dense-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: test-dense-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-dense-index-with-agg-id + body: + size: 0 + query: + semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 054011a458fe0..76a4bd5aff777 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import static org.elasticsearch.gradle.util.PlatformUtils.normalize @@ -28,12 +33,6 @@ dependencies { tasks.named('yamlRestTest').configure { usesDefaultDistribution() - - /**************************************************************** - * Enable QA/rest integration tests for snapshot builds only * - * TODO: Enable for all builds upon this feature release * - ****************************************************************/ - enabled = buildParams.isSnapshotBuild() } /********************************** diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 index da015b699cb15..739fa5eb0c6eb 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -46,9 +46,26 @@ notQuery: ; nestedQuery - : fieldName COLON LEFT_CURLY_BRACKET query RIGHT_CURLY_BRACKET + : fieldName COLON LEFT_CURLY_BRACKET nestedSubQuery RIGHT_CURLY_BRACKET ; +nestedSubQuery + : nestedSubQuery operator=(AND|OR) nestedSubQuery #booleanNestedQuery + | nestedSimpleSubQuery #defaultNestedQuery + ; + +nestedSimpleSubQuery + : notQuery + | nestedQuery + | matchAllQuery + | nestedParenthesizedQuery + | existsQuery + | rangeQuery + | fieldQuery; + +nestedParenthesizedQuery + : LEFT_PARENTHESIS nestedSubQuery RIGHT_PARENTHESIS; + matchAllQuery : (WILDCARD COLON)? WILDCARD ; diff --git a/x-pack/plugin/kql/src/main/java/module-info.java b/x-pack/plugin/kql/src/main/java/module-info.java index 41e51033b9c70..e3bb6fb99bbd3 100644 --- a/x-pack/plugin/kql/src/main/java/module-info.java +++ b/x-pack/plugin/kql/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires org.apache.lucene.queryparser; requires org.elasticsearch.logging; requires org.apache.lucene.core; + requires org.apache.lucene.join; exports org.elasticsearch.xpack.kql; exports org.elasticsearch.xpack.kql.parser; diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java index 5fe3a61c0a761..2d810a33190ca 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java @@ -9,6 +9,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -20,6 +21,7 @@ import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import java.util.List; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; @@ -56,15 +58,15 @@ public QueryBuilder toQueryBuilder(ParserRuleContext ctx) { @Override public QueryBuilder visitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { assert ctx.operator != null; - return isAndQuery(ctx) ? visitAndBooleanQuery(ctx) : visitOrBooleanQuery(ctx); + return isAndQuery(ctx) ? visitAndBooleanQuery(ctx.query()) : visitOrBooleanQuery(ctx.query()); } - public QueryBuilder visitAndBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { + public QueryBuilder visitAndBooleanQuery(List clauses) { BoolQueryBuilder builder = QueryBuilders.boolQuery(); // TODO: KQLContext has an option to wrap the clauses into a filter instead of a must clause. Do we need it? 
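Context for the refactor below: visitAndBooleanQuery and visitOrBooleanQuery now take a List of parser contexts so that the same clause-flattening logic serves both top-level and nested boolean queries. A toy illustration of that flattening (standalone Java with made-up types, not the Elasticsearch or ANTLR classes): left-recursive parsing turns a:1 AND b:2 AND c:3 into ((a:1 AND b:2) AND c:3), and merging nested AND nodes yields a single bool query with three must clauses instead of a nested chain.

    import java.util.ArrayList;
    import java.util.List;

    final class FlattenDemo {
        sealed interface Node permits And, Leaf {}
        record And(Node left, Node right) implements Node {}
        record Leaf(String clause) implements Node {}

        // Collect every clause reachable through a chain of AND nodes.
        static List<String> flattenAnd(Node node) {
            List<String> clauses = new ArrayList<>();
            if (node instanceof And and) {
                clauses.addAll(flattenAnd(and.left()));
                clauses.addAll(flattenAnd(and.right()));
            } else {
                clauses.add(((Leaf) node).clause());
            }
            return clauses;
        }

        public static void main(String[] args) {
            Node parsed = new And(new And(new Leaf("a:1"), new Leaf("b:2")), new Leaf("c:3"));
            System.out.println(flattenAnd(parsed)); // [a:1, b:2, c:3]
        }
    }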
- for (ParserRuleContext subQueryCtx : ctx.query()) { - if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isAndQuery(booleanSubQueryCtx)) { + for (ParserRuleContext subQueryCtx : clauses) { + if (isAndQuery(subQueryCtx)) { typedParsing(this, subQueryCtx, BoolQueryBuilder.class).must().forEach(builder::must); } else { builder.must(typedParsing(this, subQueryCtx, QueryBuilder.class)); @@ -74,11 +76,11 @@ public QueryBuilder visitAndBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) return rewriteConjunctionQuery(builder); } - public QueryBuilder visitOrBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { + public QueryBuilder visitOrBooleanQuery(List clauses) { BoolQueryBuilder builder = QueryBuilders.boolQuery().minimumShouldMatch(1); - for (ParserRuleContext subQueryCtx : ctx.query()) { - if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isOrQuery(booleanSubQueryCtx)) { + for (ParserRuleContext subQueryCtx : clauses) { + if (isOrQuery(subQueryCtx)) { typedParsing(this, subQueryCtx, BoolQueryBuilder.class).should().forEach(builder::should); } else { builder.should(typedParsing(this, subQueryCtx, QueryBuilder.class)); @@ -100,8 +102,40 @@ public QueryBuilder visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryCont @Override public QueryBuilder visitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { - // TODO: implementation - return new MatchNoneQueryBuilder(); + String nestedFieldName = extractText(ctx.fieldName()); + + if (kqlParsingContext.isNestedField(nestedFieldName) == false) { + throw new KqlParsingException( + "[{}] is not a valid nested field name.", + ctx.start.getLine(), + ctx.start.getCharPositionInLine(), + nestedFieldName + ); + } + QueryBuilder subQuery = kqlParsingContext.withNestedPath( + nestedFieldName, + () -> typedParsing(this, ctx.nestedSubQuery(), QueryBuilder.class) + ); + + if (subQuery instanceof MatchNoneQueryBuilder) { + return subQuery; + } + + return wrapWithNestedQuery( + nestedFieldName, + QueryBuilders.nestedQuery(kqlParsingContext.fullFieldName(nestedFieldName), subQuery, ScoreMode.None) + ); + } + + @Override + public QueryBuilder visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { + assert ctx.operator != null; + return isAndQuery(ctx) ? 
visitAndBooleanQuery(ctx.nestedSubQuery()) : visitOrBooleanQuery(ctx.nestedSubQuery()); + } + + @Override + public QueryBuilder visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { + return typedParsing(this, ctx.nestedSubQuery(), QueryBuilder.class); } @Override @@ -116,7 +150,7 @@ public QueryBuilder visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); withFields(ctx.fieldName(), (fieldName, mappedFieldType) -> { if (isRuntimeField(mappedFieldType) == false) { - boolQueryBuilder.should(QueryBuilders.existsQuery(fieldName)); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, QueryBuilders.existsQuery(fieldName))); } }); @@ -137,7 +171,7 @@ public QueryBuilder visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { rangeQuery.timeZone(kqlParsingContext.timeZone().getId()); } - boolQueryBuilder.should(rangeQuery); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, rangeQuery)); }); return rewriteDisjunctionQuery(boolQueryBuilder); @@ -200,24 +234,33 @@ public QueryBuilder visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { } if (fieldQuery != null) { - boolQueryBuilder.should(fieldQuery); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, fieldQuery)); } }); return rewriteDisjunctionQuery(boolQueryBuilder); } - private static boolean isAndQuery(KqlBaseParser.BooleanQueryContext ctx) { - return ctx.operator.getType() == KqlBaseParser.AND; + private static boolean isAndQuery(ParserRuleContext ctx) { + return switch (ctx) { + case KqlBaseParser.BooleanQueryContext booleanQueryCtx -> booleanQueryCtx.operator.getType() == KqlBaseParser.AND; + case KqlBaseParser.BooleanNestedQueryContext booleanNestedCtx -> booleanNestedCtx.operator.getType() == KqlBaseParser.AND; + default -> false; + }; } - private static boolean isOrQuery(KqlBaseParser.BooleanQueryContext ctx) { - return ctx.operator.getType() == KqlBaseParser.OR; + private static boolean isOrQuery(ParserRuleContext ctx) { + return switch (ctx) { + case KqlBaseParser.BooleanQueryContext booleanQueryCtx -> booleanQueryCtx.operator.getType() == KqlBaseParser.OR; + case KqlBaseParser.BooleanNestedQueryContext booleanNestedCtx -> booleanNestedCtx.operator.getType() == KqlBaseParser.OR; + default -> false; + }; } private void withFields(KqlBaseParser.FieldNameContext ctx, BiConsumer fieldConsummer) { assert ctx != null : "Field ctx cannot be null"; String fieldNamePattern = extractText(ctx); + Set fieldNames = kqlParsingContext.resolveFieldNames(fieldNamePattern); if (ctx.value.getType() == KqlBaseParser.QUOTED_STRING && Regex.isSimpleMatchPattern(fieldNamePattern)) { @@ -267,4 +310,14 @@ private BiFunction rangeOperation( default -> throw new IllegalArgumentException(format(null, "Invalid range operator {}\"", operator.getText())); }; } + + private QueryBuilder wrapWithNestedQuery(String fieldName, QueryBuilder query) { + String nestedPath = kqlParsingContext.nestedPath(fieldName); + + if (nestedPath == null || nestedPath.equals(kqlParsingContext.currentNestedPath())) { + return query; + } + + return wrapWithNestedQuery(nestedPath, QueryBuilders.nestedQuery(nestedPath, query, ScoreMode.None)); + } } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp index 7af37d7e3c3b5..fbfe52afa4cd5 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp +++ 
b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -42,6 +42,9 @@ query simpleQuery notQuery nestedQuery +nestedSubQuery +nestedSimpleSubQuery +nestedParenthesizedQuery matchAllQuery parenthesizedQuery rangeQuery @@ -54,4 +57,4 @@ fieldName atn: -[4, 1, 16, 136, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 1, 0, 3, 0, 30, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 40, 8, 1, 10, 1, 12, 1, 43, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 3, 5, 66, 8, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 4, 8, 79, 8, 8, 11, 8, 12, 8, 80, 1, 8, 3, 8, 84, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 100, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 107, 8, 11, 1, 12, 3, 12, 110, 8, 12, 1, 12, 4, 12, 113, 8, 12, 11, 12, 12, 12, 114, 1, 12, 3, 12, 118, 8, 12, 1, 12, 1, 12, 3, 12, 122, 8, 12, 1, 12, 1, 12, 3, 12, 126, 8, 12, 1, 12, 3, 12, 129, 8, 12, 1, 13, 1, 13, 1, 13, 3, 13, 134, 8, 13, 1, 13, 0, 1, 2, 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 145, 0, 29, 1, 0, 0, 0, 2, 33, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 65, 1, 0, 0, 0, 12, 69, 1, 0, 0, 0, 14, 73, 1, 0, 0, 0, 16, 83, 1, 0, 0, 0, 18, 85, 1, 0, 0, 0, 20, 99, 1, 0, 0, 0, 22, 106, 1, 0, 0, 0, 24, 128, 1, 0, 0, 0, 26, 133, 1, 0, 0, 0, 28, 30, 3, 2, 1, 0, 29, 28, 1, 0, 0, 0, 29, 30, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 32, 5, 0, 0, 1, 32, 1, 1, 0, 0, 0, 33, 34, 6, 1, -1, 0, 34, 35, 3, 4, 2, 0, 35, 41, 1, 0, 0, 0, 36, 37, 10, 2, 0, 0, 37, 38, 7, 0, 0, 0, 38, 40, 3, 2, 1, 2, 39, 36, 1, 0, 0, 0, 40, 43, 1, 0, 0, 0, 41, 39, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 53, 3, 6, 3, 0, 45, 53, 3, 8, 4, 0, 46, 53, 3, 12, 6, 0, 47, 53, 3, 10, 5, 0, 48, 53, 3, 18, 9, 0, 49, 53, 3, 14, 7, 0, 50, 53, 3, 20, 10, 0, 51, 53, 3, 22, 11, 0, 52, 44, 1, 0, 0, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 5, 4, 0, 0, 55, 56, 3, 4, 2, 0, 56, 7, 1, 0, 0, 0, 57, 58, 3, 26, 13, 0, 58, 59, 5, 5, 0, 0, 59, 60, 5, 12, 0, 0, 60, 61, 3, 2, 1, 0, 61, 62, 5, 13, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 16, 0, 0, 64, 66, 5, 5, 0, 0, 65, 63, 1, 0, 0, 0, 65, 66, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0, 67, 68, 5, 16, 0, 0, 68, 11, 1, 0, 0, 0, 69, 70, 5, 10, 0, 0, 70, 71, 3, 2, 1, 0, 71, 72, 5, 11, 0, 0, 72, 13, 1, 0, 0, 0, 73, 74, 3, 26, 13, 0, 74, 75, 7, 1, 0, 0, 75, 76, 3, 16, 8, 0, 76, 15, 1, 0, 0, 0, 77, 79, 7, 2, 0, 0, 78, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 80, 81, 1, 0, 0, 0, 81, 84, 1, 0, 0, 0, 82, 84, 5, 15, 0, 0, 83, 78, 1, 0, 0, 0, 83, 82, 1, 0, 0, 0, 84, 17, 1, 0, 0, 0, 85, 86, 3, 26, 13, 0, 86, 87, 5, 5, 0, 0, 87, 88, 5, 16, 0, 0, 88, 19, 1, 0, 0, 0, 89, 90, 3, 26, 13, 0, 90, 91, 5, 5, 0, 0, 91, 92, 3, 24, 12, 0, 92, 100, 1, 0, 0, 0, 93, 94, 3, 26, 13, 0, 94, 95, 5, 5, 0, 0, 95, 96, 5, 10, 0, 0, 96, 97, 3, 24, 12, 0, 97, 98, 5, 11, 0, 0, 98, 100, 1, 0, 0, 0, 99, 89, 1, 0, 0, 0, 99, 93, 1, 0, 0, 0, 100, 21, 1, 0, 0, 0, 101, 107, 3, 24, 12, 0, 102, 103, 5, 10, 0, 0, 103, 104, 3, 24, 12, 0, 104, 105, 5, 11, 0, 0, 105, 107, 1, 0, 0, 0, 106, 101, 1, 0, 
0, 0, 106, 102, 1, 0, 0, 0, 107, 23, 1, 0, 0, 0, 108, 110, 7, 3, 0, 0, 109, 108, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 113, 7, 2, 0, 0, 112, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 117, 1, 0, 0, 0, 116, 118, 7, 3, 0, 0, 117, 116, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 129, 1, 0, 0, 0, 119, 121, 7, 0, 0, 0, 120, 122, 7, 3, 0, 0, 121, 120, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 129, 1, 0, 0, 0, 123, 125, 5, 4, 0, 0, 124, 126, 7, 0, 0, 0, 125, 124, 1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 129, 5, 15, 0, 0, 128, 109, 1, 0, 0, 0, 128, 119, 1, 0, 0, 0, 128, 123, 1, 0, 0, 0, 128, 127, 1, 0, 0, 0, 129, 25, 1, 0, 0, 0, 130, 134, 5, 14, 0, 0, 131, 134, 5, 15, 0, 0, 132, 134, 5, 16, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 27, 1, 0, 0, 0, 15, 29, 41, 52, 65, 80, 83, 99, 106, 109, 114, 117, 121, 125, 128, 133] \ No newline at end of file +[4, 1, 16, 165, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 1, 0, 3, 0, 36, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 46, 8, 1, 10, 1, 12, 1, 49, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 59, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 76, 8, 5, 10, 5, 12, 5, 79, 9, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 87, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 3, 8, 95, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 4, 11, 108, 8, 11, 11, 11, 12, 11, 109, 1, 11, 3, 11, 113, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 129, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 136, 8, 14, 1, 15, 3, 15, 139, 8, 15, 1, 15, 4, 15, 142, 8, 15, 11, 15, 12, 15, 143, 1, 15, 3, 15, 147, 8, 15, 1, 15, 1, 15, 3, 15, 151, 8, 15, 1, 15, 1, 15, 3, 15, 155, 8, 15, 1, 15, 3, 15, 158, 8, 15, 1, 16, 1, 16, 1, 16, 3, 16, 163, 8, 16, 1, 16, 0, 2, 2, 10, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 177, 0, 35, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 58, 1, 0, 0, 0, 6, 60, 1, 0, 0, 0, 8, 63, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 86, 1, 0, 0, 0, 14, 88, 1, 0, 0, 0, 16, 94, 1, 0, 0, 0, 18, 98, 1, 0, 0, 0, 20, 102, 1, 0, 0, 0, 22, 112, 1, 0, 0, 0, 24, 114, 1, 0, 0, 0, 26, 128, 1, 0, 0, 0, 28, 135, 1, 0, 0, 0, 30, 157, 1, 0, 0, 0, 32, 162, 1, 0, 0, 0, 34, 36, 3, 2, 1, 0, 35, 34, 1, 0, 0, 0, 35, 36, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 38, 5, 0, 0, 1, 38, 1, 1, 0, 0, 0, 39, 40, 6, 1, -1, 0, 40, 41, 3, 4, 2, 0, 41, 47, 1, 0, 0, 0, 42, 43, 10, 2, 0, 0, 43, 44, 7, 0, 0, 0, 44, 46, 3, 2, 1, 2, 45, 42, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 47, 48, 1, 0, 0, 0, 48, 3, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 50, 59, 3, 6, 3, 0, 51, 59, 3, 8, 4, 0, 52, 59, 3, 18, 9, 0, 53, 59, 3, 16, 8, 0, 54, 59, 3, 24, 12, 0, 55, 59, 3, 20, 10, 0, 56, 59, 3, 26, 13, 0, 57, 59, 3, 28, 14, 0, 58, 50, 1, 0, 0, 0, 58, 51, 1, 0, 0, 0, 58, 52, 1, 0, 0, 0, 58, 53, 1, 0, 0, 0, 58, 54, 1, 0, 0, 0, 58, 55, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 58, 57, 1, 0, 0, 0, 59, 5, 1, 0, 0, 0, 60, 61, 5, 4, 0, 0, 61, 62, 3, 4, 2, 0, 62, 7, 1, 0, 0, 0, 63, 64, 3, 32, 16, 0, 64, 65, 5, 5, 0, 0, 65, 66, 5, 12, 0, 0, 66, 67, 3, 10, 5, 0, 67, 68, 5, 13, 0, 0, 68, 9, 1, 0, 0, 0, 69, 70, 6, 5, -1, 
0, 70, 71, 3, 12, 6, 0, 71, 77, 1, 0, 0, 0, 72, 73, 10, 2, 0, 0, 73, 74, 7, 0, 0, 0, 74, 76, 3, 10, 5, 2, 75, 72, 1, 0, 0, 0, 76, 79, 1, 0, 0, 0, 77, 75, 1, 0, 0, 0, 77, 78, 1, 0, 0, 0, 78, 11, 1, 0, 0, 0, 79, 77, 1, 0, 0, 0, 80, 87, 3, 6, 3, 0, 81, 87, 3, 8, 4, 0, 82, 87, 3, 14, 7, 0, 83, 87, 3, 24, 12, 0, 84, 87, 3, 20, 10, 0, 85, 87, 3, 26, 13, 0, 86, 80, 1, 0, 0, 0, 86, 81, 1, 0, 0, 0, 86, 82, 1, 0, 0, 0, 86, 83, 1, 0, 0, 0, 86, 84, 1, 0, 0, 0, 86, 85, 1, 0, 0, 0, 87, 13, 1, 0, 0, 0, 88, 89, 5, 10, 0, 0, 89, 90, 3, 10, 5, 0, 90, 91, 5, 11, 0, 0, 91, 15, 1, 0, 0, 0, 92, 93, 5, 16, 0, 0, 93, 95, 5, 5, 0, 0, 94, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 97, 5, 16, 0, 0, 97, 17, 1, 0, 0, 0, 98, 99, 5, 10, 0, 0, 99, 100, 3, 2, 1, 0, 100, 101, 5, 11, 0, 0, 101, 19, 1, 0, 0, 0, 102, 103, 3, 32, 16, 0, 103, 104, 7, 1, 0, 0, 104, 105, 3, 22, 11, 0, 105, 21, 1, 0, 0, 0, 106, 108, 7, 2, 0, 0, 107, 106, 1, 0, 0, 0, 108, 109, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 113, 1, 0, 0, 0, 111, 113, 5, 15, 0, 0, 112, 107, 1, 0, 0, 0, 112, 111, 1, 0, 0, 0, 113, 23, 1, 0, 0, 0, 114, 115, 3, 32, 16, 0, 115, 116, 5, 5, 0, 0, 116, 117, 5, 16, 0, 0, 117, 25, 1, 0, 0, 0, 118, 119, 3, 32, 16, 0, 119, 120, 5, 5, 0, 0, 120, 121, 3, 30, 15, 0, 121, 129, 1, 0, 0, 0, 122, 123, 3, 32, 16, 0, 123, 124, 5, 5, 0, 0, 124, 125, 5, 10, 0, 0, 125, 126, 3, 30, 15, 0, 126, 127, 5, 11, 0, 0, 127, 129, 1, 0, 0, 0, 128, 118, 1, 0, 0, 0, 128, 122, 1, 0, 0, 0, 129, 27, 1, 0, 0, 0, 130, 136, 3, 30, 15, 0, 131, 132, 5, 10, 0, 0, 132, 133, 3, 30, 15, 0, 133, 134, 5, 11, 0, 0, 134, 136, 1, 0, 0, 0, 135, 130, 1, 0, 0, 0, 135, 131, 1, 0, 0, 0, 136, 29, 1, 0, 0, 0, 137, 139, 7, 3, 0, 0, 138, 137, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 141, 1, 0, 0, 0, 140, 142, 7, 2, 0, 0, 141, 140, 1, 0, 0, 0, 142, 143, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 146, 1, 0, 0, 0, 145, 147, 7, 3, 0, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 158, 1, 0, 0, 0, 148, 150, 7, 0, 0, 0, 149, 151, 7, 3, 0, 0, 150, 149, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 158, 1, 0, 0, 0, 152, 154, 5, 4, 0, 0, 153, 155, 7, 0, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 158, 1, 0, 0, 0, 156, 158, 5, 15, 0, 0, 157, 138, 1, 0, 0, 0, 157, 148, 1, 0, 0, 0, 157, 152, 1, 0, 0, 0, 157, 156, 1, 0, 0, 0, 158, 31, 1, 0, 0, 0, 159, 163, 5, 14, 0, 0, 160, 163, 5, 15, 0, 0, 161, 163, 5, 16, 0, 0, 162, 159, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 162, 161, 1, 0, 0, 0, 163, 33, 1, 0, 0, 0, 17, 35, 47, 58, 77, 86, 94, 109, 112, 128, 135, 138, 143, 146, 150, 154, 157, 162] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java index e1015edcd4931..c3fc1281b6fd9 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java @@ -92,6 +92,54 @@ class KqlBaseBaseListener implements KqlBaseListener { *
<p>The default implementation does nothing.</p>
    */ @Override public void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java index 3973a647c8cd8..84c882c2e2bcf 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java @@ -62,6 +62,34 @@ class KqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements KqlBaseVisitor<T> { * {@link #visitChildren} on {@code ctx}.</p>
    */ @Override public T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java index 49f2031208642..a44ecf1ecad23 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java @@ -79,6 +79,50 @@ interface KqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Enter a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void enterBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Exit a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void exitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Enter a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void enterDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Exit a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void exitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + */ + void enterNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + */ + void exitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + */ + void enterNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + */ + void exitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); /** * Enter a parse tree produced by {@link KqlBaseParser#matchAllQuery}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java index 118ac32aadd61..7e797b9edbb93 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -30,12 +30,15 @@ class KqlBaseParser extends Parser { RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16; public static final int RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_notQuery = 3, - RULE_nestedQuery = 4, RULE_matchAllQuery = 5, RULE_parenthesizedQuery = 6, - RULE_rangeQuery = 7, RULE_rangeQueryValue = 8, RULE_existsQuery = 9, RULE_fieldQuery = 10, - RULE_fieldLessQuery = 11, RULE_fieldQueryValue = 12, RULE_fieldName = 13; + RULE_nestedQuery = 4, RULE_nestedSubQuery = 5, RULE_nestedSimpleSubQuery = 6, + RULE_nestedParenthesizedQuery = 7, RULE_matchAllQuery = 8, RULE_parenthesizedQuery = 9, + RULE_rangeQuery = 10, RULE_rangeQueryValue = 11, RULE_existsQuery = 12, + RULE_fieldQuery = 13, RULE_fieldLessQuery = 14, RULE_fieldQueryValue = 15, + RULE_fieldName = 16; private static String[] makeRuleNames() { return new String[] { - "topLevelQuery", "query", "simpleQuery", "notQuery", "nestedQuery", "matchAllQuery", + "topLevelQuery", "query", "simpleQuery", "notQuery", "nestedQuery", "nestedSubQuery", + "nestedSimpleSubQuery", "nestedParenthesizedQuery", "matchAllQuery", "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery", "fieldQuery", "fieldLessQuery", "fieldQueryValue", "fieldName" }; @@ -139,17 +142,17 @@ public final TopLevelQueryContext topLevelQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(29); + setState(35); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 115740L) != 0)) { { - setState(28); + setState(34); query(0); } } - setState(31); + setState(37); match(EOF); } } @@ -244,11 +247,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(34); + setState(40); simpleQuery(); } _ctx.stop = _input.LT(-1); - setState(41); + setState(47); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -259,9 +262,9 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(36); + setState(42); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(37); + setState(43); ((BooleanQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { @@ -272,12 +275,12 @@ private QueryContext query(int _p) throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(38); + setState(44); query(2); } } } - setState(43); + setState(49); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); } @@ -343,62 +346,62 @@ public final SimpleQueryContext simpleQuery() throws RecognitionException { SimpleQueryContext _localctx = new SimpleQueryContext(_ctx, getState()); enterRule(_localctx, 4, RULE_simpleQuery); try { - setState(52); + setState(58); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(44); + setState(50); notQuery(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(45); + setState(51); nestedQuery(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(46); + setState(52); parenthesizedQuery(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(47); + setState(53); matchAllQuery(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(48); + setState(54); existsQuery(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(49); + setState(55); rangeQuery(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(50); + setState(56); fieldQuery(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(51); + setState(57); fieldLessQuery(); } break; @@ -447,9 +450,9 @@ public final NotQueryContext notQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(54); + setState(60); match(NOT); - setState(55); + setState(61); ((NotQueryContext)_localctx).subQuery = simpleQuery(); } } @@ -471,8 +474,8 @@ public FieldNameContext fieldName() { } public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } public TerminalNode LEFT_CURLY_BRACKET() { return getToken(KqlBaseParser.LEFT_CURLY_BRACKET, 0); } - public QueryContext query() { - return getRuleContext(QueryContext.class,0); + public NestedSubQueryContext nestedSubQuery() { + return getRuleContext(NestedSubQueryContext.class,0); } public TerminalNode RIGHT_CURLY_BRACKET() { return getToken(KqlBaseParser.RIGHT_CURLY_BRACKET, 0); } public NestedQueryContext(ParserRuleContext parent, int invokingState) { @@ -500,15 +503,15 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(57); + setState(63); fieldName(); - setState(58); + setState(64); match(COLON); - setState(59); + setState(65); match(LEFT_CURLY_BRACKET); - setState(60); - query(0); - setState(61); + setState(66); + nestedSubQuery(0); + setState(67); match(RIGHT_CURLY_BRACKET); } } @@ -523,6 +526,288 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class NestedSubQueryContext extends ParserRuleContext { + public NestedSubQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedSubQuery; } + + public NestedSubQueryContext() { } + public void copyFrom(NestedSubQueryContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class BooleanNestedQueryContext extends NestedSubQueryContext { + public Token operator; + public List nestedSubQuery() { + return getRuleContexts(NestedSubQueryContext.class); + } + public NestedSubQueryContext nestedSubQuery(int i) { + return getRuleContext(NestedSubQueryContext.class,i); + } + public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); } + public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); } + public BooleanNestedQueryContext(NestedSubQueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterBooleanNestedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitBooleanNestedQuery(this); + } + @Override 
+ public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitBooleanNestedQuery(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") + public static class DefaultNestedQueryContext extends NestedSubQueryContext { + public NestedSimpleSubQueryContext nestedSimpleSubQuery() { + return getRuleContext(NestedSimpleSubQueryContext.class,0); + } + public DefaultNestedQueryContext(NestedSubQueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterDefaultNestedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitDefaultNestedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitDefaultNestedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedSubQueryContext nestedSubQuery() throws RecognitionException { + return nestedSubQuery(0); + } + + private NestedSubQueryContext nestedSubQuery(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + NestedSubQueryContext _localctx = new NestedSubQueryContext(_ctx, _parentState); + NestedSubQueryContext _prevctx = _localctx; + int _startState = 10; + enterRecursionRule(_localctx, 10, RULE_nestedSubQuery, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + { + _localctx = new DefaultNestedQueryContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(70); + nestedSimpleSubQuery(); + } + _ctx.stop = _input.LT(-1); + setState(77); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,3,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + { + _localctx = new BooleanNestedQueryContext(new NestedSubQueryContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_nestedSubQuery); + setState(72); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(73); + ((BooleanNestedQueryContext)_localctx).operator = _input.LT(1); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { + ((BooleanNestedQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(74); + nestedSubQuery(2); + } + } + } + setState(79); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,3,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + unrollRecursionContexts(_parentctx); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class NestedSimpleSubQueryContext extends ParserRuleContext { + public NotQueryContext notQuery() { + return getRuleContext(NotQueryContext.class,0); + } + public NestedQueryContext nestedQuery() { + return getRuleContext(NestedQueryContext.class,0); + } + public NestedParenthesizedQueryContext nestedParenthesizedQuery() { + return 
getRuleContext(NestedParenthesizedQueryContext.class,0); + } + public ExistsQueryContext existsQuery() { + return getRuleContext(ExistsQueryContext.class,0); + } + public RangeQueryContext rangeQuery() { + return getRuleContext(RangeQueryContext.class,0); + } + public FieldQueryContext fieldQuery() { + return getRuleContext(FieldQueryContext.class,0); + } + public NestedSimpleSubQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedSimpleSubQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterNestedSimpleSubQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNestedSimpleSubQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNestedSimpleSubQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedSimpleSubQueryContext nestedSimpleSubQuery() throws RecognitionException { + NestedSimpleSubQueryContext _localctx = new NestedSimpleSubQueryContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_nestedSimpleSubQuery); + try { + setState(86); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(80); + notQuery(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(81); + nestedQuery(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(82); + nestedParenthesizedQuery(); + } + break; + case 4: + enterOuterAlt(_localctx, 4); + { + setState(83); + existsQuery(); + } + break; + case 5: + enterOuterAlt(_localctx, 5); + { + setState(84); + rangeQuery(); + } + break; + case 6: + enterOuterAlt(_localctx, 6); + { + setState(85); + fieldQuery(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class NestedParenthesizedQueryContext extends ParserRuleContext { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public NestedSubQueryContext nestedSubQuery() { + return getRuleContext(NestedSubQueryContext.class,0); + } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public NestedParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedParenthesizedQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterNestedParenthesizedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNestedParenthesizedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNestedParenthesizedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedParenthesizedQueryContext nestedParenthesizedQuery() throws RecognitionException { 
+ NestedParenthesizedQueryContext _localctx = new NestedParenthesizedQueryContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_nestedParenthesizedQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(88); + match(LEFT_PARENTHESIS); + setState(89); + nestedSubQuery(0); + setState(90); + match(RIGHT_PARENTHESIS); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class MatchAllQueryContext extends ParserRuleContext { public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } @@ -551,23 +836,23 @@ public T accept(ParseTreeVisitor visitor) { public final MatchAllQueryContext matchAllQuery() throws RecognitionException { MatchAllQueryContext _localctx = new MatchAllQueryContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_matchAllQuery); + enterRule(_localctx, 16, RULE_matchAllQuery); try { enterOuterAlt(_localctx, 1); { - setState(65); + setState(94); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(63); + setState(92); match(WILDCARD); - setState(64); + setState(93); match(COLON); } break; } - setState(67); + setState(96); match(WILDCARD); } } @@ -610,15 +895,15 @@ public T accept(ParseTreeVisitor visitor) { public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_parenthesizedQuery); + enterRule(_localctx, 18, RULE_parenthesizedQuery); try { enterOuterAlt(_localctx, 1); { - setState(69); + setState(98); match(LEFT_PARENTHESIS); - setState(70); + setState(99); query(0); - setState(71); + setState(100); match(RIGHT_PARENTHESIS); } } @@ -667,14 +952,14 @@ public T accept(ParseTreeVisitor visitor) { public final RangeQueryContext rangeQuery() throws RecognitionException { RangeQueryContext _localctx = new RangeQueryContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_rangeQuery); + enterRule(_localctx, 20, RULE_rangeQuery); int _la; try { enterOuterAlt(_localctx, 1); { - setState(73); + setState(102); fieldName(); - setState(74); + setState(103); ((RangeQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 960L) != 0)) ) { @@ -685,7 +970,7 @@ public final RangeQueryContext rangeQuery() throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(75); + setState(104); rangeQueryValue(); } } @@ -732,18 +1017,18 @@ public T accept(ParseTreeVisitor visitor) { public final RangeQueryValueContext rangeQueryValue() throws RecognitionException { RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_rangeQueryValue); + enterRule(_localctx, 22, RULE_rangeQueryValue); int _la; try { int _alt; - setState(83); + setState(112); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(78); + setState(107); _errHandler.sync(this); _alt = 1; do { @@ -751,7 +1036,7 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio case 1: { { - setState(77); + setState(106); _la = _input.LA(1); if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); @@ 
-767,16 +1052,16 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(80); + setState(109); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,4,_ctx); + _alt = getInterpreter().adaptivePredict(_input,6,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(82); + setState(111); match(QUOTED_STRING); } break; @@ -823,15 +1108,15 @@ public T accept(ParseTreeVisitor visitor) { public final ExistsQueryContext existsQuery() throws RecognitionException { ExistsQueryContext _localctx = new ExistsQueryContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_existsQuery); + enterRule(_localctx, 24, RULE_existsQuery); try { enterOuterAlt(_localctx, 1); { - setState(85); + setState(114); fieldName(); - setState(86); + setState(115); match(COLON); - setState(87); + setState(116); match(WILDCARD); } } @@ -878,34 +1163,34 @@ public T accept(ParseTreeVisitor visitor) { public final FieldQueryContext fieldQuery() throws RecognitionException { FieldQueryContext _localctx = new FieldQueryContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_fieldQuery); + enterRule(_localctx, 26, RULE_fieldQuery); try { - setState(99); + setState(128); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(89); + setState(118); fieldName(); - setState(90); + setState(119); match(COLON); - setState(91); + setState(120); fieldQueryValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(93); + setState(122); fieldName(); - setState(94); + setState(123); match(COLON); - setState(95); + setState(124); match(LEFT_PARENTHESIS); - setState(96); + setState(125); fieldQueryValue(); - setState(97); + setState(126); match(RIGHT_PARENTHESIS); } break; @@ -950,9 +1235,9 @@ public T accept(ParseTreeVisitor visitor) { public final FieldLessQueryContext fieldLessQuery() throws RecognitionException { FieldLessQueryContext _localctx = new FieldLessQueryContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_fieldLessQuery); + enterRule(_localctx, 28, RULE_fieldLessQuery); try { - setState(106); + setState(135); _errHandler.sync(this); switch (_input.LA(1)) { case AND: @@ -963,18 +1248,18 @@ public final FieldLessQueryContext fieldLessQuery() throws RecognitionException case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(101); + setState(130); fieldQueryValue(); } break; case LEFT_PARENTHESIS: enterOuterAlt(_localctx, 2); { - setState(102); + setState(131); match(LEFT_PARENTHESIS); - setState(103); + setState(132); fieldQueryValue(); - setState(104); + setState(133); match(RIGHT_PARENTHESIS); } break; @@ -1037,22 +1322,22 @@ public T accept(ParseTreeVisitor visitor) { public final FieldQueryValueContext fieldQueryValue() throws RecognitionException { FieldQueryValueContext _localctx = new FieldQueryValueContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_fieldQueryValue); + enterRule(_localctx, 30, RULE_fieldQueryValue); int _la; try { int _alt; - setState(128); + setState(157); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(109); + setState(138); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & 
~0x3f) == 0 && ((1L << _la) & 28L) != 0)) { { - setState(108); + setState(137); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1065,7 +1350,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio } } - setState(112); + setState(141); _errHandler.sync(this); _alt = 1; do { @@ -1073,7 +1358,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 1: { { - setState(111); + setState(140); _la = _input.LA(1); if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); @@ -1089,16 +1374,16 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(114); + setState(143); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,9,_ctx); + _alt = getInterpreter().adaptivePredict(_input,11,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(117); + setState(146); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,10,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: { - setState(116); + setState(145); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1116,7 +1401,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 2: enterOuterAlt(_localctx, 2); { - setState(119); + setState(148); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { _errHandler.recoverInline(this); @@ -1126,12 +1411,12 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio _errHandler.reportMatch(this); consume(); } - setState(121); + setState(150); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: { - setState(120); + setState(149); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1149,14 +1434,14 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 3: enterOuterAlt(_localctx, 3); { - setState(123); + setState(152); match(NOT); - setState(125); + setState(154); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(124); + setState(153); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { _errHandler.recoverInline(this); @@ -1174,7 +1459,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 4: enterOuterAlt(_localctx, 4); { - setState(127); + setState(156); match(QUOTED_STRING); } break; @@ -1218,29 +1503,29 @@ public T accept(ParseTreeVisitor visitor) { public final FieldNameContext fieldName() throws RecognitionException { FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_fieldName); + enterRule(_localctx, 32, RULE_fieldName); try { - setState(133); + setState(162); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: enterOuterAlt(_localctx, 1); { - setState(130); + setState(159); ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(131); + setState(160); ((FieldNameContext)_localctx).value = match(QUOTED_STRING); } break; case 
WILDCARD: enterOuterAlt(_localctx, 3); { - setState(132); + setState(161); ((FieldNameContext)_localctx).value = match(WILDCARD); } break; @@ -1263,6 +1548,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: return query_sempred((QueryContext)_localctx, predIndex); + case 5: + return nestedSubQuery_sempred((NestedSubQueryContext)_localctx, predIndex); } return true; } @@ -1273,87 +1560,117 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) { } return true; } + private boolean nestedSubQuery_sempred(NestedSubQueryContext _localctx, int predIndex) { + switch (predIndex) { + case 1: + return precpred(_ctx, 2); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001\u0010\u0088\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0010\u00a5\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ - "\u0002\f\u0007\f\u0002\r\u0007\r\u0001\u0000\u0003\u0000\u001e\b\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0005\u0001(\b\u0001\n\u0001\f\u0001+\t\u0001"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0005\u0001\u0005\u0003\u0005B\b\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0004\bO\b\b\u000b\b\f\b"+ - "P\u0001\b\u0003\bT\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ - "\nd\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0003"+ - "\u000bk\b\u000b\u0001\f\u0003\fn\b\f\u0001\f\u0004\fq\b\f\u000b\f\f\f"+ - "r\u0001\f\u0003\fv\b\f\u0001\f\u0001\f\u0003\fz\b\f\u0001\f\u0001\f\u0003"+ - "\f~\b\f\u0001\f\u0003\f\u0081\b\f\u0001\r\u0001\r\u0001\r\u0003\r\u0086"+ - "\b\r\u0001\r\u0000\u0001\u0002\u000e\u0000\u0002\u0004\u0006\b\n\f\u000e"+ - "\u0010\u0012\u0014\u0016\u0018\u001a\u0000\u0004\u0001\u0000\u0002\u0003"+ - "\u0001\u0000\u0006\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002"+ - "\u0004\u0091\u0000\u001d\u0001\u0000\u0000\u0000\u0002!\u0001\u0000\u0000"+ - "\u0000\u00044\u0001\u0000\u0000\u0000\u00066\u0001\u0000\u0000\u0000\b"+ - "9\u0001\u0000\u0000\u0000\nA\u0001\u0000\u0000\u0000\fE\u0001\u0000\u0000"+ - "\u0000\u000eI\u0001\u0000\u0000\u0000\u0010S\u0001\u0000\u0000\u0000\u0012"+ - "U\u0001\u0000\u0000\u0000\u0014c\u0001\u0000\u0000\u0000\u0016j\u0001"+ - "\u0000\u0000\u0000\u0018\u0080\u0001\u0000\u0000\u0000\u001a\u0085\u0001"+ - "\u0000\u0000\u0000\u001c\u001e\u0003\u0002\u0001\u0000\u001d\u001c\u0001"+ - "\u0000\u0000\u0000\u001d\u001e\u0001\u0000\u0000\u0000\u001e\u001f\u0001"+ - "\u0000\u0000\u0000\u001f \u0005\u0000\u0000\u0001 \u0001\u0001\u0000\u0000"+ - "\u0000!\"\u0006\u0001\uffff\uffff\u0000\"#\u0003\u0004\u0002\u0000#)\u0001"+ - "\u0000\u0000\u0000$%\n\u0002\u0000\u0000%&\u0007\u0000\u0000\u0000&(\u0003"+ - "\u0002\u0001\u0002\'$\u0001\u0000\u0000\u0000(+\u0001\u0000\u0000\u0000"+ - ")\'\u0001\u0000\u0000\u0000)*\u0001\u0000\u0000\u0000*\u0003\u0001\u0000"+ - 
"\u0000\u0000+)\u0001\u0000\u0000\u0000,5\u0003\u0006\u0003\u0000-5\u0003"+ - "\b\u0004\u0000.5\u0003\f\u0006\u0000/5\u0003\n\u0005\u000005\u0003\u0012"+ - "\t\u000015\u0003\u000e\u0007\u000025\u0003\u0014\n\u000035\u0003\u0016"+ - "\u000b\u00004,\u0001\u0000\u0000\u00004-\u0001\u0000\u0000\u00004.\u0001"+ - "\u0000\u0000\u00004/\u0001\u0000\u0000\u000040\u0001\u0000\u0000\u0000"+ - "41\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000\u0000"+ - "\u00005\u0005\u0001\u0000\u0000\u000067\u0005\u0004\u0000\u000078\u0003"+ - "\u0004\u0002\u00008\u0007\u0001\u0000\u0000\u00009:\u0003\u001a\r\u0000"+ - ":;\u0005\u0005\u0000\u0000;<\u0005\f\u0000\u0000<=\u0003\u0002\u0001\u0000"+ - "=>\u0005\r\u0000\u0000>\t\u0001\u0000\u0000\u0000?@\u0005\u0010\u0000"+ - "\u0000@B\u0005\u0005\u0000\u0000A?\u0001\u0000\u0000\u0000AB\u0001\u0000"+ - "\u0000\u0000BC\u0001\u0000\u0000\u0000CD\u0005\u0010\u0000\u0000D\u000b"+ - "\u0001\u0000\u0000\u0000EF\u0005\n\u0000\u0000FG\u0003\u0002\u0001\u0000"+ - "GH\u0005\u000b\u0000\u0000H\r\u0001\u0000\u0000\u0000IJ\u0003\u001a\r"+ - "\u0000JK\u0007\u0001\u0000\u0000KL\u0003\u0010\b\u0000L\u000f\u0001\u0000"+ - "\u0000\u0000MO\u0007\u0002\u0000\u0000NM\u0001\u0000\u0000\u0000OP\u0001"+ - "\u0000\u0000\u0000PN\u0001\u0000\u0000\u0000PQ\u0001\u0000\u0000\u0000"+ - "QT\u0001\u0000\u0000\u0000RT\u0005\u000f\u0000\u0000SN\u0001\u0000\u0000"+ - "\u0000SR\u0001\u0000\u0000\u0000T\u0011\u0001\u0000\u0000\u0000UV\u0003"+ - "\u001a\r\u0000VW\u0005\u0005\u0000\u0000WX\u0005\u0010\u0000\u0000X\u0013"+ - "\u0001\u0000\u0000\u0000YZ\u0003\u001a\r\u0000Z[\u0005\u0005\u0000\u0000"+ - "[\\\u0003\u0018\f\u0000\\d\u0001\u0000\u0000\u0000]^\u0003\u001a\r\u0000"+ - "^_\u0005\u0005\u0000\u0000_`\u0005\n\u0000\u0000`a\u0003\u0018\f\u0000"+ - "ab\u0005\u000b\u0000\u0000bd\u0001\u0000\u0000\u0000cY\u0001\u0000\u0000"+ - "\u0000c]\u0001\u0000\u0000\u0000d\u0015\u0001\u0000\u0000\u0000ek\u0003"+ - "\u0018\f\u0000fg\u0005\n\u0000\u0000gh\u0003\u0018\f\u0000hi\u0005\u000b"+ - "\u0000\u0000ik\u0001\u0000\u0000\u0000je\u0001\u0000\u0000\u0000jf\u0001"+ - "\u0000\u0000\u0000k\u0017\u0001\u0000\u0000\u0000ln\u0007\u0003\u0000"+ - "\u0000ml\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000"+ - "\u0000\u0000oq\u0007\u0002\u0000\u0000po\u0001\u0000\u0000\u0000qr\u0001"+ - "\u0000\u0000\u0000rp\u0001\u0000\u0000\u0000rs\u0001\u0000\u0000\u0000"+ - "su\u0001\u0000\u0000\u0000tv\u0007\u0003\u0000\u0000ut\u0001\u0000\u0000"+ - "\u0000uv\u0001\u0000\u0000\u0000v\u0081\u0001\u0000\u0000\u0000wy\u0007"+ - "\u0000\u0000\u0000xz\u0007\u0003\u0000\u0000yx\u0001\u0000\u0000\u0000"+ - "yz\u0001\u0000\u0000\u0000z\u0081\u0001\u0000\u0000\u0000{}\u0005\u0004"+ - "\u0000\u0000|~\u0007\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000}~\u0001"+ - "\u0000\u0000\u0000~\u0081\u0001\u0000\u0000\u0000\u007f\u0081\u0005\u000f"+ - "\u0000\u0000\u0080m\u0001\u0000\u0000\u0000\u0080w\u0001\u0000\u0000\u0000"+ - "\u0080{\u0001\u0000\u0000\u0000\u0080\u007f\u0001\u0000\u0000\u0000\u0081"+ - "\u0019\u0001\u0000\u0000\u0000\u0082\u0086\u0005\u000e\u0000\u0000\u0083"+ - "\u0086\u0005\u000f\u0000\u0000\u0084\u0086\u0005\u0010\u0000\u0000\u0085"+ - "\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000\u0085"+ - "\u0084\u0001\u0000\u0000\u0000\u0086\u001b\u0001\u0000\u0000\u0000\u000f"+ - "\u001d)4APScjmruy}\u0080\u0085"; + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0001\u0000\u0003\u0000$\b\u0000\u0001"+ + 
"\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0005\u0001.\b\u0001\n\u0001\f\u00011\t\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0003\u0002;\b\u0002\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0005\u0005L\b\u0005\n\u0005\f\u0005O\t\u0005\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006W\b"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0003\b_\b\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\u000b\u0004\u000bl\b\u000b\u000b\u000b"+ + "\f\u000bm\u0001\u000b\u0003\u000bq\b\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0003\r\u0081\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0003\u000e\u0088\b\u000e\u0001\u000f\u0003\u000f\u008b"+ + "\b\u000f\u0001\u000f\u0004\u000f\u008e\b\u000f\u000b\u000f\f\u000f\u008f"+ + "\u0001\u000f\u0003\u000f\u0093\b\u000f\u0001\u000f\u0001\u000f\u0003\u000f"+ + "\u0097\b\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u009b\b\u000f\u0001"+ + "\u000f\u0003\u000f\u009e\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003"+ + "\u0010\u00a3\b\u0010\u0001\u0010\u0000\u0002\u0002\n\u0011\u0000\u0002"+ + "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e"+ + " \u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006\t\u0002\u0000"+ + "\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u00b1\u0000#\u0001\u0000"+ + "\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000\u0004:\u0001\u0000\u0000"+ + "\u0000\u0006<\u0001\u0000\u0000\u0000\b?\u0001\u0000\u0000\u0000\nE\u0001"+ + "\u0000\u0000\u0000\fV\u0001\u0000\u0000\u0000\u000eX\u0001\u0000\u0000"+ + "\u0000\u0010^\u0001\u0000\u0000\u0000\u0012b\u0001\u0000\u0000\u0000\u0014"+ + "f\u0001\u0000\u0000\u0000\u0016p\u0001\u0000\u0000\u0000\u0018r\u0001"+ + "\u0000\u0000\u0000\u001a\u0080\u0001\u0000\u0000\u0000\u001c\u0087\u0001"+ + "\u0000\u0000\u0000\u001e\u009d\u0001\u0000\u0000\u0000 \u00a2\u0001\u0000"+ + "\u0000\u0000\"$\u0003\u0002\u0001\u0000#\"\u0001\u0000\u0000\u0000#$\u0001"+ + "\u0000\u0000\u0000$%\u0001\u0000\u0000\u0000%&\u0005\u0000\u0000\u0001"+ + "&\u0001\u0001\u0000\u0000\u0000\'(\u0006\u0001\uffff\uffff\u0000()\u0003"+ + "\u0004\u0002\u0000)/\u0001\u0000\u0000\u0000*+\n\u0002\u0000\u0000+,\u0007"+ + "\u0000\u0000\u0000,.\u0003\u0002\u0001\u0002-*\u0001\u0000\u0000\u0000"+ + ".1\u0001\u0000\u0000\u0000/-\u0001\u0000\u0000\u0000/0\u0001\u0000\u0000"+ + "\u00000\u0003\u0001\u0000\u0000\u00001/\u0001\u0000\u0000\u00002;\u0003"+ + "\u0006\u0003\u00003;\u0003\b\u0004\u00004;\u0003\u0012\t\u00005;\u0003"+ + "\u0010\b\u00006;\u0003\u0018\f\u00007;\u0003\u0014\n\u00008;\u0003\u001a"+ + "\r\u00009;\u0003\u001c\u000e\u0000:2\u0001\u0000\u0000\u0000:3\u0001\u0000"+ + "\u0000\u0000:4\u0001\u0000\u0000\u0000:5\u0001\u0000\u0000\u0000:6\u0001"+ + "\u0000\u0000\u0000:7\u0001\u0000\u0000\u0000:8\u0001\u0000\u0000\u0000"+ + ":9\u0001\u0000\u0000\u0000;\u0005\u0001\u0000\u0000\u0000<=\u0005\u0004"+ + "\u0000\u0000=>\u0003\u0004\u0002\u0000>\u0007\u0001\u0000\u0000\u0000"+ + "?@\u0003 \u0010\u0000@A\u0005\u0005\u0000\u0000AB\u0005\f\u0000\u0000"+ + "BC\u0003\n\u0005\u0000CD\u0005\r\u0000\u0000D\t\u0001\u0000\u0000\u0000"+ + 
"EF\u0006\u0005\uffff\uffff\u0000FG\u0003\f\u0006\u0000GM\u0001\u0000\u0000"+ + "\u0000HI\n\u0002\u0000\u0000IJ\u0007\u0000\u0000\u0000JL\u0003\n\u0005"+ + "\u0002KH\u0001\u0000\u0000\u0000LO\u0001\u0000\u0000\u0000MK\u0001\u0000"+ + "\u0000\u0000MN\u0001\u0000\u0000\u0000N\u000b\u0001\u0000\u0000\u0000"+ + "OM\u0001\u0000\u0000\u0000PW\u0003\u0006\u0003\u0000QW\u0003\b\u0004\u0000"+ + "RW\u0003\u000e\u0007\u0000SW\u0003\u0018\f\u0000TW\u0003\u0014\n\u0000"+ + "UW\u0003\u001a\r\u0000VP\u0001\u0000\u0000\u0000VQ\u0001\u0000\u0000\u0000"+ + "VR\u0001\u0000\u0000\u0000VS\u0001\u0000\u0000\u0000VT\u0001\u0000\u0000"+ + "\u0000VU\u0001\u0000\u0000\u0000W\r\u0001\u0000\u0000\u0000XY\u0005\n"+ + "\u0000\u0000YZ\u0003\n\u0005\u0000Z[\u0005\u000b\u0000\u0000[\u000f\u0001"+ + "\u0000\u0000\u0000\\]\u0005\u0010\u0000\u0000]_\u0005\u0005\u0000\u0000"+ + "^\\\u0001\u0000\u0000\u0000^_\u0001\u0000\u0000\u0000_`\u0001\u0000\u0000"+ + "\u0000`a\u0005\u0010\u0000\u0000a\u0011\u0001\u0000\u0000\u0000bc\u0005"+ + "\n\u0000\u0000cd\u0003\u0002\u0001\u0000de\u0005\u000b\u0000\u0000e\u0013"+ + "\u0001\u0000\u0000\u0000fg\u0003 \u0010\u0000gh\u0007\u0001\u0000\u0000"+ + "hi\u0003\u0016\u000b\u0000i\u0015\u0001\u0000\u0000\u0000jl\u0007\u0002"+ + "\u0000\u0000kj\u0001\u0000\u0000\u0000lm\u0001\u0000\u0000\u0000mk\u0001"+ + "\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000nq\u0001\u0000\u0000\u0000"+ + "oq\u0005\u000f\u0000\u0000pk\u0001\u0000\u0000\u0000po\u0001\u0000\u0000"+ + "\u0000q\u0017\u0001\u0000\u0000\u0000rs\u0003 \u0010\u0000st\u0005\u0005"+ + "\u0000\u0000tu\u0005\u0010\u0000\u0000u\u0019\u0001\u0000\u0000\u0000"+ + "vw\u0003 \u0010\u0000wx\u0005\u0005\u0000\u0000xy\u0003\u001e\u000f\u0000"+ + "y\u0081\u0001\u0000\u0000\u0000z{\u0003 \u0010\u0000{|\u0005\u0005\u0000"+ + "\u0000|}\u0005\n\u0000\u0000}~\u0003\u001e\u000f\u0000~\u007f\u0005\u000b"+ + "\u0000\u0000\u007f\u0081\u0001\u0000\u0000\u0000\u0080v\u0001\u0000\u0000"+ + "\u0000\u0080z\u0001\u0000\u0000\u0000\u0081\u001b\u0001\u0000\u0000\u0000"+ + "\u0082\u0088\u0003\u001e\u000f\u0000\u0083\u0084\u0005\n\u0000\u0000\u0084"+ + "\u0085\u0003\u001e\u000f\u0000\u0085\u0086\u0005\u000b\u0000\u0000\u0086"+ + "\u0088\u0001\u0000\u0000\u0000\u0087\u0082\u0001\u0000\u0000\u0000\u0087"+ + "\u0083\u0001\u0000\u0000\u0000\u0088\u001d\u0001\u0000\u0000\u0000\u0089"+ + "\u008b\u0007\u0003\u0000\u0000\u008a\u0089\u0001\u0000\u0000\u0000\u008a"+ + "\u008b\u0001\u0000\u0000\u0000\u008b\u008d\u0001\u0000\u0000\u0000\u008c"+ + "\u008e\u0007\u0002\u0000\u0000\u008d\u008c\u0001\u0000\u0000\u0000\u008e"+ + "\u008f\u0001\u0000\u0000\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f"+ + "\u0090\u0001\u0000\u0000\u0000\u0090\u0092\u0001\u0000\u0000\u0000\u0091"+ + "\u0093\u0007\u0003\u0000\u0000\u0092\u0091\u0001\u0000\u0000\u0000\u0092"+ + "\u0093\u0001\u0000\u0000\u0000\u0093\u009e\u0001\u0000\u0000\u0000\u0094"+ + "\u0096\u0007\u0000\u0000\u0000\u0095\u0097\u0007\u0003\u0000\u0000\u0096"+ + "\u0095\u0001\u0000\u0000\u0000\u0096\u0097\u0001\u0000\u0000\u0000\u0097"+ + "\u009e\u0001\u0000\u0000\u0000\u0098\u009a\u0005\u0004\u0000\u0000\u0099"+ + "\u009b\u0007\u0000\u0000\u0000\u009a\u0099\u0001\u0000\u0000\u0000\u009a"+ + "\u009b\u0001\u0000\u0000\u0000\u009b\u009e\u0001\u0000\u0000\u0000\u009c"+ + "\u009e\u0005\u000f\u0000\u0000\u009d\u008a\u0001\u0000\u0000\u0000\u009d"+ + "\u0094\u0001\u0000\u0000\u0000\u009d\u0098\u0001\u0000\u0000\u0000\u009d"+ + "\u009c\u0001\u0000\u0000\u0000\u009e\u001f\u0001\u0000\u0000\u0000\u009f"+ + 
"\u00a3\u0005\u000e\u0000\u0000\u00a0\u00a3\u0005\u000f\u0000\u0000\u00a1"+ + "\u00a3\u0005\u0010\u0000\u0000\u00a2\u009f\u0001\u0000\u0000\u0000\u00a2"+ + "\u00a0\u0001\u0000\u0000\u0000\u00a2\u00a1\u0001\u0000\u0000\u0000\u00a3"+ + "!\u0001\u0000\u0000\u0000\u0011#/:MV^mp\u0080\u0087\u008a\u008f\u0092"+ + "\u0096\u009a\u009d\u00a2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java index 18ef8f389195b..8200bfe0da25d 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java @@ -56,6 +56,32 @@ interface KqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Visit a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Visit a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); /** * Visit a parse tree produced by {@link KqlBaseParser#matchAllQuery}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java index 5f88080fb3ed4..30740833ee40e 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java @@ -11,11 +11,18 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.support.NestedScope; import java.time.ZoneId; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Supplier; + +import static org.elasticsearch.common.Strings.format; public class KqlParsingContext { @@ -32,10 +39,11 @@ public static Builder builder(QueryRewriteContext queryRewriteContext) { return new Builder(queryRewriteContext); } - private QueryRewriteContext queryRewriteContext; + private final QueryRewriteContext queryRewriteContext; private final boolean caseInsensitive; private final ZoneId timeZone; private final String defaultField; + private final NestedScope nestedScope = new NestedScope(); public KqlParsingContext(QueryRewriteContext queryRewriteContext, boolean caseInsensitive, ZoneId timeZone, String defaultField) { this.queryRewriteContext = queryRewriteContext; @@ -56,9 +64,17 @@ public String defaultField() { return defaultField; } + public String nestedPath(String fieldName) { + return nestedLookup().getNestedParent(fieldName); + } + + public boolean isNestedField(String fieldName) { + return nestedMappers().containsKey(fullFieldName(fieldName)); + } + public Set resolveFieldNames(String fieldNamePattern) { assert fieldNamePattern != null && fieldNamePattern.isEmpty() == false : "fieldNamePattern cannot be null or empty"; - return queryRewriteContext.getMatchingFieldNames(fieldNamePattern); + return queryRewriteContext.getMatchingFieldNames(fullFieldName(fieldNamePattern)); } public Set resolveDefaultFieldNames() { @@ -89,6 +105,38 @@ public boolean isSearchableField(String fieldName) { return isSearchableField(fieldName, fieldType(fieldName)); } + public NestedScope nestedScope() { + return nestedScope; + } + + public T withNestedPath(String nestedFieldName, Supplier supplier) { + assert isNestedField(nestedFieldName); + nestedScope.nextLevel(nestedMappers().get(fullFieldName(nestedFieldName))); + T result = supplier.get(); + nestedScope.previousLevel(); + return result; + } + + public String currentNestedPath() { + return nestedScope().getObjectMapper() != null ? 
nestedScope().getObjectMapper().fullPath() : null; + } + + public String fullFieldName(String fieldName) { + if (nestedScope.getObjectMapper() == null) { + return fieldName; + } + + return format("%s.%s", nestedScope.getObjectMapper().fullPath(), fieldName); + } + + private NestedLookup nestedLookup() { + return queryRewriteContext.getMappingLookup().nestedLookup(); + } + + private Map nestedMappers() { + return nestedLookup().getNestedMappers(); + } + public static class Builder { private final QueryRewriteContext queryRewriteContext; private boolean caseInsensitive = true; diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java index 588e60bd4dd75..e6e4e20cfd3ca 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java @@ -46,11 +46,9 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractKqlParserTestCase extends AbstractBuilderTestCase { - protected static final String SUPPORTED_QUERY_FILE_PATH = "/supported-queries"; protected static final String UNSUPPORTED_QUERY_FILE_PATH = "/unsupported-queries"; protected static final Predicate BOOLEAN_QUERY_FILTER = (q) -> q.matches("(?i)[^{]*[^\\\\]*(NOT|AND|OR)[^}]*"); - protected static final String NESTED_FIELD_NAME = "mapped_nested"; @Override diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java new file mode 100644 index 0000000000000..5660945fa0db3 --- /dev/null +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.common.Strings.format; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class KqlNestedFieldQueryTests extends AbstractKqlParserTestCase { + public void testInvalidNestedFieldName() { + for (String invalidFieldName : List.of(OBJECT_FIELD_NAME, TEXT_FIELD_NAME, "not_a_field", "mapped_nest*")) { + KqlParsingException e = assertThrows( + KqlParsingException.class, + () -> parseKqlQuery(format("%s : { %s: foo AND %s < 10 } ", invalidFieldName, TEXT_FIELD_NAME, INT_FIELD_NAME)) + ); + assertThat(e.getMessage(), Matchers.containsString(invalidFieldName)); + assertThat(e.getMessage(), Matchers.containsString("is not a valid nested field name")); + } + } + + public void testInlineNestedFieldMatchTextQuery() { + for (String fieldName : List.of(TEXT_FIELD_NAME, INT_FIELD_NAME)) { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, fieldName); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertMatchQueryBuilder(nestedQuery.query(), nestedFieldName, searchTerms); + } + + { + // Several levels of nested fields. + String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, fieldName); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertMatchQueryBuilder(nestedSubQuery.query(), nestedFieldName, searchTerms); + } + } + } + + public void testInlineNestedFieldMatchKeywordFieldQuery() { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertTermQueryBuilder(nestedQuery.query(), nestedFieldName, searchTerms); + } + + { + // Several levels of nested fields. 
+ String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, KEYWORD_FIELD_NAME); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertTermQueryBuilder(nestedSubQuery.query(), nestedFieldName, searchTerms); + } + } + + public void testInlineNestedFieldRangeQuery() { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME); + String operator = randomFrom(">", ">=", "<", "<="); + String kqlQueryString = format("%s %s %s", nestedFieldName, operator, randomDouble()); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertRangeQueryBuilder(nestedQuery.query(), nestedFieldName, rangeQueryBuilder -> {}); + } + + { + // Several levels of nested fields. + String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, INT_FIELD_NAME); + String operator = randomFrom(">", ">=", "<", "<="); + String kqlQueryString = format("%s %s %s", nestedFieldName, operator, randomDouble()); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertRangeQueryBuilder(nestedSubQuery.query(), nestedFieldName, rangeQueryBuilder -> {}); + } + } + + public void testNestedQuerySyntax() { + // Single word - Keyword & text field + List.of(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME) + .forEach( + fieldName -> assertThat( + parseKqlQuery(format("%s : { %s : %s }", NESTED_FIELD_NAME, fieldName, "foo")), + equalTo(parseKqlQuery(format("%s.%s : %s", NESTED_FIELD_NAME, fieldName, "foo"))) + ) + ); + + // Multiple words - Keyword & text field + List.of(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME) + .forEach( + fieldName -> assertThat( + parseKqlQuery(format("%s : { %s : %s }", NESTED_FIELD_NAME, fieldName, "foo bar")), + equalTo(parseKqlQuery(format("%s.%s : %s", NESTED_FIELD_NAME, fieldName, "foo bar"))) + ) + ); + + // Range syntax + { + String operator = randomFrom("<", "<=", ">", ">="); + double rangeValue = randomDouble(); + assertThat( + parseKqlQuery(format("%s : { %s %s %s }", NESTED_FIELD_NAME, INT_FIELD_NAME, operator, rangeValue)), + equalTo(parseKqlQuery(format("%s.%s %s %s", NESTED_FIELD_NAME, INT_FIELD_NAME, operator, rangeValue))) + ); + } + + // Several level of nesting + { + QueryBuilder inlineQuery = parseKqlQuery( + format("%s.%s.%s : %s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar") + ); + + assertThat( + parseKqlQuery(format("%s : { %s : { %s : %s } }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + + assertThat( + parseKqlQuery(format("%s.%s : { %s : %s }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + + assertThat( + 
parseKqlQuery(format("%s : { %s.%s : %s }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + } + } + + public void testBooleanAndNestedQuerySyntax() { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery( + format("%s: { %s : foo AND %s: bar AND %s > 3}", NESTED_FIELD_NAME, TEXT_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME) + ) + ); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.mustNot(), empty()); + assertThat(subQuery.must(), hasSize(3)); + assertMatchQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof MatchQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), + "foo" + ); + assertTermQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof TermQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), + "bar" + ); + assertRangeQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof RangeQueryBuilder).findAny().get(), + format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), + q -> {} + ); + } + + public void testBooleanOrNestedQuerySyntax() { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery( + format("%s: { %s : foo OR %s: bar OR %s > 3 }", NESTED_FIELD_NAME, TEXT_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME) + ) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.mustNot(), empty()); + assertThat(subQuery.should(), hasSize(3)); + assertMatchQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof MatchQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), + "foo" + ); + assertTermQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof TermQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), + "bar" + ); + assertRangeQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof RangeQueryBuilder).findAny().get(), + format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), + q -> {} + ); + } + + public void testBooleanNotNestedQuerySyntax() { + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s : foo }", NESTED_FIELD_NAME, TEXT_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertMatchQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), "foo"); + } + + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s : foo }", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + 
assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertTermQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), "foo"); + } + + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s < 3 }", NESTED_FIELD_NAME, INT_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertRangeQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), q -> {}); + } + } + + private static String randomSearchTerms() { + return Stream.generate(ESTestCase::randomIdentifier).limit(randomIntBetween(1, 10)).collect(Collectors.joining(" ")); + } +} diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java index 45dd3312bbc03..6415cdb94ada7 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java @@ -10,7 +10,10 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; + +import java.util.regex.Pattern; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; @@ -35,11 +38,18 @@ public void testParseExistsQueryWithNoMatchingFields() { public void testParseExistsQueryWithASingleField() { for (String fieldName : searchableFields()) { - ExistsQueryBuilder parsedQuery = asInstanceOf(ExistsQueryBuilder.class, parseKqlQuery(kqlExistsQuery(fieldName))); - assertThat(parsedQuery.fieldName(), equalTo(fieldName)); + QueryBuilder parsedQuery = parseKqlQuery(kqlExistsQuery(fieldName)); // Using quotes to wrap the field name does not change the result. 
assertThat(parseKqlQuery(kqlExistsQuery("\"" + fieldName + "\"")), equalTo(parsedQuery)); + + long nestingLevel = Pattern.compile("[.]").splitAsStream(fieldName).takeWhile(s -> s.equals(NESTED_FIELD_NAME)).count(); + for (int i = 0; i < nestingLevel; i++) { + parsedQuery = asInstanceOf(NestedQueryBuilder.class, parsedQuery).query(); + } + + ExistsQueryBuilder existsQuery = asInstanceOf(ExistsQueryBuilder.class, parsedQuery); + assertThat(existsQuery.fieldName(), equalTo(fieldName)); } } @@ -53,7 +63,9 @@ public void testParseExistsQueryUsingWildcardFieldName() { assertThat( parsedQuery.should(), - containsInAnyOrder(searchableFields(fieldNamePattern).stream().map(QueryBuilders::existsQuery).toArray()) + containsInAnyOrder( + searchableFields(fieldNamePattern).stream().map(fieldName -> parseKqlQuery(kqlExistsQuery(fieldName))).toArray() + ) ); } diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java index 2bc23c7d457dd..7323f7d6d1a4e 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.kql.query; import org.apache.lucene.search.Query; +import org.elasticsearch.Build; import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -21,6 +22,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xpack.kql.KqlPlugin; import org.hamcrest.Matchers; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collection; @@ -34,6 +36,10 @@ import static org.hamcrest.Matchers.nullValue; public class KqlQueryBuilderTests extends AbstractQueryTestCase<KqlQueryBuilder> { + @BeforeClass + protected static void ensureSnapshotBuild() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + } @Override protected Collection<Class<? extends Plugin>> getPlugins() { diff --git a/x-pack/plugin/kql/src/test/resources/supported-queries b/x-pack/plugin/kql/src/test/resources/supported-queries index b659b1ae5b1db..f54a1d32fe3be 100644 --- a/x-pack/plugin/kql/src/test/resources/supported-queries +++ b/x-pack/plugin/kql/src/test/resources/supported-queries @@ -91,13 +91,6 @@ mapped_nested: { NOT(mapped_string:foo AND mapped_string_2:foo bar) } mapped_nested: { NOT mapped_string:foo AND NOT mapped_string_2:foo bar } mapped_nested: { (NOT mapped_string:foo) AND (NOT mapped_string_2:foo bar) } mapped_nested: { NOT(mapped_string:foo) AND NOT(mapped_string_2:foo bar) } -mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar AND foo bar } -mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar OR foo bar } -mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar OR foo bar } -mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar AND foo bar } -mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) } -mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) } -mapped_nested: { mapped_string:foo OR (mapped_string_2:foo bar OR foo bar) } mapped_nested: { mapped_str*:foo } mapped_nested: { mapped_nested : { mapped_string:foo AND mapped_int < 3 } AND mapped_string_2:foo bar } mapped_nested: { mapped_nested.mapped_string:foo AND mapped_string_2:foo bar } diff --git
a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries index 149bcf5bd2b5a..526ae94d6ac88 100644 --- a/x-pack/plugin/kql/src/test/resources/unsupported-queries +++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries @@ -25,6 +25,20 @@ mapped_string:(foo (bar)) // Bad syntax for nested fields: mapped_nested { mapped_string: bar } +// Unknown nested field or not a nested field +not_nested : { mapped_string: bar } +mapped_string: { mapped_string: bar } + +// Nested queries cannot use fieldless subqueries +mapped_nested: { foo } +mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar AND foo bar } +mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar OR foo bar } +mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar OR foo bar } +mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar AND foo bar } +mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) } +mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) } +mapped_nested: { mapped_string:foo OR (mapped_string_2:foo bar OR foo bar) } + // Missing escape sequences: mapped_string: foo:bar mapped_string: (foo and bar) diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml new file mode 100644 index 0000000000000..4ce6688e5222d --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml @@ -0,0 +1,218 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: [ capabilities, contains ] + reason: KQL query is not available + + - requires: + "test_runner_features": "contains" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + department: + type: keyword + staff: + type: integer + courses: + type: nested + properties: + name: + type: text + credits: + type: integer + sessions: + type: nested + properties: + semester: + type: keyword + students: + type: integer + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 37 }, { "semester": "fall2020", "students": 45} ] }, { "name": "Theory of Computation", "credits": 4, "sessions": [ { "semester": "spr2021", "students": 19 }, { "semester": "fall2020", "students": 14 } ] } ] } + { "index" : { "_id": "doc-42" } } + { "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1, "sessions": [ { "semester": "spr2021", "students": 100 }, { "semester": "fall2020", "students": 134 } ] }, { "name": "Linear Algebra", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 29 }, { "semester": "fall2020", "students": 23 } ] } ] } + +--- +"Inline syntax": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming AND courses.credits > 3" + } + } + } + - match: { hits.total: 1 } + -
match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming OR courses.credits > 3" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + +--- +"Nested field syntax": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses : { name: object oriented programming }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming AND credits > 3 }" + } + } + } + - match: { hits.total: 0 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming AND credits >= 3 }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming OR credits > 3 }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { NOT name: object oriented programming AND credits < 4 }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + +--- +"Several levels of nesting field syntax": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming AND sessions.semester: spr2021 }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { sessions : { semester: spr2021 AND students < 20 } }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: computation AND sessions : { semester: spr2021 AND students < 20 } }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle index 60578f832d153..1aef69e0e3fac 100644 --- a/x-pack/plugin/logsdb/build.gradle +++ b/x-pack/plugin/logsdb/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' diff --git a/x-pack/plugin/logsdb/qa/with-basic/build.gradle b/x-pack/plugin/logsdb/qa/with-basic/build.gradle index 44ebd83bf4f4c..9729ac9c29cef 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/build.gradle +++ b/x-pack/plugin/logsdb/qa/with-basic/build.gradle @@ -5,8 +5,6 @@ * 2.0.
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 8d7a813b206d8..8930ff23fb3b0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -271,7 +271,7 @@ public void testDateHistogramAggregation() throws IOException { } public void testEsqlSource() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List<XContentBuilder> documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -287,7 +287,7 @@ public void testEsqlSource() throws IOException { } public void testEsqlTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List<XContentBuilder> documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -302,7 +302,7 @@ public void testEsqlTermsAggregation() throws IOException { } public void testEsqlTermsAggregationByMethod() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List<XContentBuilder> documents = generateDocuments(numberOfDocuments); indexDocuments(documents); diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 0eb0754985c94..04d12fd51bae7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -48,6 +48,11 @@ public LogsDBPlugin(Settings settings) { public Collection<?> createComponents(PluginServices services) { licenseService.setLicenseState(XPackPlugin.getSharedLicenseState()); var clusterSettings = services.clusterService().getClusterSettings(); + // The `cluster.logsdb.enabled` setting is registered by this plugin, but its value may be updated by other plugins + // before this plugin registers its settings update consumer below. This means we might miss updates that occurred earlier. + // To handle this, we explicitly fetch the current `cluster.logsdb.enabled` setting value from the cluster settings + // and update it, ensuring we capture any prior changes.
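+ // (The settings update consumer registered below keeps the value in sync from then on.)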
+ logsdbIndexModeSettingsProvider.updateClusterIndexModeLogsdbEnabled(clusterSettings.get(CLUSTER_LOGSDB_ENABLED)); clusterSettings.addSettingsUpdateConsumer(FALLBACK_SETTING, licenseService::setSyntheticSourceFallback); clusterSettings.addSettingsUpdateConsumer( CLUSTER_LOGSDB_ENABLED, @@ -62,10 +67,13 @@ public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders(Index if (DiscoveryNode.isStateless(settings)) { return List.of(logsdbIndexModeSettingsProvider); } - return List.of( - new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), - logsdbIndexModeSettingsProvider + var syntheticSettingProvider = new SyntheticSourceIndexSettingsProvider( + licenseService, + parameters.mapperServiceFactory(), + logsdbIndexModeSettingsProvider, + () -> parameters.clusterService().state().nodes().getMinSupportedIndexVersion() ); + return List.of(syntheticSettingProvider, logsdbIndexModeSettingsProvider); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index e87f10ec19916..1f38ecda19515 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -26,6 +27,7 @@ import java.io.IOException; import java.time.Instant; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -39,15 +40,18 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction<IndexMetadata, MapperService, IOException> mapperServiceFactory; private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; + private final Supplier<IndexVersion> createdIndexVersion; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, CheckedFunction<IndexMetadata, MapperService, IOException> mapperServiceFactory, - LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider, + Supplier<IndexVersion> createdIndexVersion ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; + this.createdIndexVersion = createdIndexVersion; } @Override @@ -148,7 +152,7 @@ private IndexMetadata buildIndexMetadataForMapperService( ); int shardReplicas = indexTemplateAndCreateRequestSettings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0); var finalResolvedSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_VERSION_CREATED, createdIndexVersion.get()) + .put(indexTemplateAndCreateRequestSettings) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, dummyShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 2d8723a0d8c25..1f5d26eaedf34 100644 ---
a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -54,7 +55,7 @@ public void setup() { provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, getLogsdbIndexModeSettingsProvider(false)); + }, getLogsdbIndexModeSettingsProvider(false), IndexVersion::current); newMapperServiceCounter.set(0); } @@ -336,7 +337,8 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(true) + getLogsdbIndexModeSettingsProvider(true), + IndexVersion::current ); final Settings settings = Settings.EMPTY; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 33fedce3b59c1..792df4dbf639e 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -470,13 +470,7 @@ create an index with time_series index mode and synthetic source: indices.get_settings: index: "test_time_series_index_mode_synthetic" - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } - - - - do: - indices.get_mapping: - index: test_time_series_index_mode_synthetic - - - match: { test_time_series_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_time_series_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with logsdb index mode and synthetic source: @@ -493,12 +487,7 @@ create an index with logsdb index mode and synthetic source: indices.get_settings: index: "test_logsdb_index_mode_synthetic" - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_synthetic - - - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_logsdb_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with time_series index mode and stored source: @@ -524,12 +513,7 @@ create an index with time_series index mode and stored source: indices.get_settings: index: "test_time_series_index_mode_undefined" - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series } - - - do: - indices.get_mapping: - index: test_time_series_index_mode_undefined - - - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored } + - match: { test_time_series_index_mode_undefined.settings.index.mapping.source.mode: stored } --- 
create an index with logsdb index mode and stored source: @@ -546,12 +530,7 @@ create an index with logsdb index mode and stored source: indices.get_settings: index: "test_logsdb_index_mode_undefined" - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_undefined - - - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored } + - match: { test_logsdb_index_mode_undefined.settings.index.mapping.source.mode: stored } --- create an index with time_series index mode and disabled source: diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java index a9992e168bc66..2ea56b147bf9c 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return restChannel -> { - final String content = request.content().utf8ToString(); + final String content = request.releasableContent().utf8ToString(); client.execute( PutPipelineAction.INSTANCE, new PutPipelineRequest(id, content, request.getXContentType()), diff --git a/x-pack/plugin/mapper-aggregate-metric/build.gradle b/x-pack/plugin/mapper-aggregate-metric/build.gradle index bae5acc21fc75..2a7841929b21d 100644 --- a/x-pack/plugin/mapper-aggregate-metric/build.gradle +++ b/x-pack/plugin/mapper-aggregate-metric/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index 3b11d951fe37a..c1e0eb61b611b 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -1,7 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { name 'constant-keyword' diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 216f82552353b..fa5d9428bb0c6 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.constantkeyword.mapper; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -58,7 +57,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.stream.Stream; /** * A {@link FieldMapper} that assigns every document the same value. @@ -356,40 +354,14 @@ protected SyntheticSourceSupport syntheticSourceSupport() { return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); } - var loader = new SourceLoader.SyntheticFieldLoader() { - @Override - public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() { - return Stream.of(); - } - - @Override - public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) { - return docId -> true; - } - - @Override - public boolean hasValue() { - return true; - } - - @Override - public void write(XContentBuilder b) throws IOException { - if (fieldType().value != null) { - b.field(leafName(), fieldType().value); - } - } - - @Override - public void reset() { - // NOOP - } - - @Override - public String fieldName() { - return fullPath(); - } - }; + /* + If there was no value in the document, synthetic source should not contain the value either. + This is consistent with stored source behavior and is important for scenarios + like reindexing into an index whose mapping defines a different value for this field. - return new SyntheticSourceSupport.Native(loader); + To achieve this we use the fallback implementation, which only includes the value in _source + if it was present in the original document.
+ */ + return new SyntheticSourceSupport.Fallback(); + } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index 4661fe77e8b11..2b9170afdfd70 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -333,6 +333,17 @@ public void testNullValueSyntheticSource() throws IOException { assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); } + public void testNoValueInDocumentSyntheticSource() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + b.field("type", "constant_keyword"); + b.field("value", randomAlphaOfLength(5)); + b.endObject(); + })).documentMapper(); + + assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); + } + @Override protected boolean supportsEmptyInputArray() { return false; diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java index 789059d9e11c0..5b6048b481abf 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class ConstantKeywordClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public ConstantKeywordClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidat public static Iterable<Object[]> parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("constant-keyword").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml index d40f69f483dbb..012b1006b8d20 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml @@ -1,7 +1,7 @@ constant_keyword: - requires: - cluster_features: [ "mapper.source.mode_from_index_setting" ] - reason: "Source mode configured through index setting" + cluster_features: [
"mapper.constant_keyword.synthetic_source_write_fix" ] + reason: "Behavior fix" - do: indices.create: @@ -26,6 +26,15 @@ constant_keyword: body: kwd: foo + - do: + index: + index: test + id: 2 + refresh: true + body: + kwd: foo + const_kwd: bar + - do: + search: + index: test + body: + query: + ids: + values: [1] + + - match: + hits.hits.0._source: + kwd: foo + + - do: + search: + index: test + body: + query: + ids: + values: [2] + - match: + hits.hits.0._source: + kwd: foo diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index faad1db822560..17a4f8a03fa57 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License @@ -8,11 +5,13 @@ import org.elasticsearch.gradle.internal.info.BuildParams * 2.0. */ +import org.elasticsearch.gradle.Version + evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { name 'unsigned-long' @@ -36,13 +35,3 @@ restResources { include '_common', 'bulk', 'indices', 'index', 'search', 'xpack' } } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} - -testClusters.configureEach { - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") -} diff --git a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java index 008bfb193387c..df2c5d81ca14a 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class UnsignedLongClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public UnsignedLongClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate t public static Iterable<Object[]> parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("unsigned-long").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-version/build.gradle index fb760b3446dfd..bf78c61523e39 100644 ---
a/x-pack/plugin/mapper-version/build.gradle +++ b/x-pack/plugin/mapper-version/build.gradle @@ -1,11 +1,15 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ evaluationDependsOn(xpackModule('core')) - apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -25,8 +29,3 @@ dependencies { testImplementation project(path: xpackModule('analytics')) } -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java index bc9f32766a3bb..3474d5ce9be8c 100644 --- a/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class VersionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public VersionClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCa public static Iterable<Object[]> parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("mapper-version").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java new file mode 100644 index 0000000000000..929dac6ee357a --- /dev/null +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.ml.packageloader.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.tasks.RemovedTaskListener; +import org.elasticsearch.tasks.Task; + +public record DownloadTaskRemovedListener(ModelDownloadTask trackedTask, ActionListener<AcknowledgedResponse> listener) + implements + RemovedTaskListener { + + @Override + public void onRemoved(Task task) { + if (task.getId() == trackedTask.getId()) { + if (trackedTask.getTaskException() == null) { + listener.onResponse(AcknowledgedResponse.TRUE); + } else { + listener.onFailure(trackedTask.getTaskException()); + } + } + } +} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java index 59977bd418e11..dd09c3cf65fec 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java @@ -13,6 +13,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.MlTasks; import java.io.IOException; import java.util.Map; @@ -51,9 +52,12 @@ public void writeTo(StreamOutput out) throws IOException { } private final AtomicReference<DownLoadProgress> downloadProgress = new AtomicReference<>(new DownLoadProgress(0, 0)); + private final String modelId; + private volatile Exception taskException; - public ModelDownloadTask(long id, String type, String action, String description, TaskId parentTaskId, Map<String, String> headers) { - super(id, type, action, description, parentTaskId, headers); + public ModelDownloadTask(long id, String type, String action, String modelId, TaskId parentTaskId, Map<String, String> headers) { + super(id, type, action, taskDescription(modelId), parentTaskId, headers); + this.modelId = modelId; } void setProgress(int totalParts, int downloadedParts) { @@ -65,4 +69,19 @@ public DownloadStatus getStatus() { return new DownloadStatus(downloadProgress.get()); } + public String getModelId() { + return modelId; + } + + public void setTaskException(Exception exception) { + this.taskException = exception; + } + + public Exception getTaskException() { + return taskException; + } + + public static String taskDescription(String modelId) { + return MlTasks.downloadModelTaskDescription(modelId); + } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 76b7781b1cffe..2a14a8761e357 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -30,7 +30,6 @@ import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import
org.elasticsearch.xpack.core.common.notifications.Level; @@ -42,6 +41,9 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -49,7 +51,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; -import static org.elasticsearch.xpack.core.ml.MlTasks.downloadModelTaskDescription; public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction<Request, AcknowledgedResponse> { @@ -57,6 +58,7 @@ public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction< private final Client client; private final CircuitBreakerService circuitBreakerService; + final Map<String, List<DownloadTaskRemovedListener>> taskRemovedListenersByModelId; @Inject public TransportLoadTrainedModelPackage( @@ -81,6 +83,7 @@ public TransportLoadTrainedModelPackage( ); this.client = new OriginSettingClient(client, ML_ORIGIN); this.circuitBreakerService = circuitBreakerService; + taskRemovedListenersByModelId = new HashMap<>(); } @Override @@ -91,6 +94,12 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener) throws Exception { + if (handleDownloadInProgress(request.getModelId(), request.isWaitForCompletion(), listener)) { + logger.debug("Existing download of model [{}] in progress", request.getModelId()); + // download in progress, nothing to do + return; + } + ModelDownloadTask downloadTask = createDownloadTask(request); try { @@ -107,7 +116,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop(); - importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask); + importModel(client, () -> unregisterTask(downloadTask), request, modelImporter, downloadTask, downloadCompleteListener); } catch (Exception e) { taskManager.unregister(downloadTask); listener.onFailure(e); @@ -124,22 +133,91 @@ private ParentTaskAssigningClient getParentTaskAssigningClient(Task originTask) return new ParentTaskAssigningClient(client, parentTaskId); } + /** + * Look for a current download task of the model and optionally wait + * for that task to complete if there is one. + * Synchronized with {@code unregisterTask} to prevent the task being + * removed before the remove listener is added.
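+ * If a task exists and {@code isWaitForCompletion} is true, the listener is completed once that task finishes and is unregistered.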
+ * @param modelId Model being downloaded + * @param isWaitForCompletion Wait until the download completes before + * calling the listener + * @param listener Model download listener + * @return True if a download task is in progress + */ + synchronized boolean handleDownloadInProgress( + String modelId, + boolean isWaitForCompletion, + ActionListener<AcknowledgedResponse> listener + ) { + var description = ModelDownloadTask.taskDescription(modelId); + var tasks = taskManager.getCancellableTasks().values(); + + ModelDownloadTask inProgress = null; + for (var task : tasks) { + if (description.equals(task.getDescription()) && task instanceof ModelDownloadTask downloadTask) { + inProgress = downloadTask; + break; + } + } + + if (inProgress != null) { + if (isWaitForCompletion == false) { + // Not waiting for the download to complete, it is enough that the download is in progress + // Respond now not when the download completes + listener.onResponse(AcknowledgedResponse.TRUE); + return true; + } + // Otherwise register a task removed listener which is called + // once the task is complete and unregistered + var tracker = new DownloadTaskRemovedListener(inProgress, listener); + taskRemovedListenersByModelId.computeIfAbsent(modelId, s -> new ArrayList<>()).add(tracker); + taskManager.registerRemovedTaskListener(tracker); + return true; + } + + return false; + } + + /** + * Unregister the completed task triggering any remove task listeners. + * This method is synchronized to prevent the task being removed while + * {@code handleDownloadInProgress} is in progress. + * @param task The completed task + */ + synchronized void unregisterTask(ModelDownloadTask task) { + taskManager.unregister(task); // unregister will call the on remove function + + var trackers = taskRemovedListenersByModelId.remove(task.getModelId()); + if (trackers != null) { + for (var tracker : trackers) { + taskManager.unregisterRemovedTaskListener(tracker); + } + } + } + /** * This is package scope so that we can test the logic directly. - * This should only be called from the masterOperation method and the tests + * This should only be called from the masterOperation method and the tests. + * This method is static for testing. * * @param auditClient a client which should only be used to send audit notifications. This client cannot be associated with the passed * in task, that way when the task is cancelled the notification requests can * still be performed. If it is associated with the task (i.e. via ParentTaskAssigningClient), * then the requests will throw a TaskCancelledException. + * @param unregisterTaskFn Runnable to unregister the task. Because this is a static method, + * a lambda is used rather than the instance method.
+ * @param request The download request + * @param modelImporter The importer + * @param task Download task + * @param listener Listener notified with the download result + */ static void importModel( Client auditClient, - TaskManager taskManager, + Runnable unregisterTaskFn, Request request, ModelImporter modelImporter, - ActionListener<AcknowledgedResponse> listener, - Task task + ModelDownloadTask task, + ActionListener<AcknowledgedResponse> listener ) { final String modelId = request.getModelId(); final long relativeStartNanos = System.nanoTime(); @@ -155,9 +233,12 @@ static void importModel( Level.INFO ); listener.onResponse(AcknowledgedResponse.TRUE); - }, exception -> listener.onFailure(processException(auditClient, modelId, exception))); + }, exception -> { + task.setTaskException(exception); + listener.onFailure(processException(auditClient, modelId, exception)); + }); - modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task))); + modelImporter.doImport(ActionListener.runAfter(finishListener, unregisterTaskFn)); } static Exception processException(Client auditClient, String modelId, Exception e) { @@ -197,14 +278,7 @@ public TaskId getParentTask() { @Override public ModelDownloadTask createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { - return new ModelDownloadTask( - id, - type, - action, - downloadModelTaskDescription(request.getModelId()), - parentTaskId, - headers - ); + return new ModelDownloadTask(id, type, action, request.getModelId(), parentTaskId, headers); } }, false); } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index cbcfd5b760779..3486ce6af0db5 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -10,13 +10,19 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.ml.action.AuditMlNotificationAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; @@ -27,9 +33,13 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static
org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; +import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -37,6 +47,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportLoadTrainedModelPackageTests extends ESTestCase { private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]"; @@ -44,17 +55,10 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase { public void testSendsFinishedUploadNotification() { var uploader = createUploader(null); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); var client = mock(Client.class); - TransportLoadTrainedModelPackage.importModel( - client, - taskManager, - createRequestWithWaiting(), - uploader, - ActionListener.noop(), - task - ); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, ActionListener.noop()); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications - the start and finish messages @@ -108,32 +112,63 @@ public void testSendsWarningNotificationForTaskCancelledException() throws Excep public void testCallsOnResponseWithAcknowledgedResponse() throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(null); var responseRef = new AtomicReference<AcknowledgedResponse>(); var listener = ActionListener.wrap(responseRef::set, e -> fail("received an exception: " + e.getMessage())); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, listener); assertThat(responseRef.get(), is(AcknowledgedResponse.TRUE)); } public void testDoesNotCallListenerWhenNotWaitingForCompletion() { var uploader = mock(ModelImporter.class); var client = mock(Client.class); - var taskManager = mock(TaskManager.class); - var task = mock(Task.class); - + var task = mock(ModelDownloadTask.class); TransportLoadTrainedModelPackage.importModel( client, - taskManager, + () -> {}, createRequestWithoutWaiting(), uploader, - ActionListener.running(ESTestCase::fail), - task + task, + ActionListener.running(ESTestCase::fail) ); } + public void testWaitForExistingDownload() { + var taskManager = mock(TaskManager.class); + var modelId = "foo"; + var task = new ModelDownloadTask(1L, MODEL_IMPORT_TASK_TYPE, MODEL_IMPORT_TASK_ACTION, modelId, new TaskId("node", 1L), Map.of()); + when(taskManager.getCancellableTasks()).thenReturn(Map.of(1L, task)); + + var transportService = mock(TransportService.class); + when(transportService.getTaskManager()).thenReturn(taskManager); + + var action = new TransportLoadTrainedModelPackage( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + mock(Client.class), + mock(CircuitBreakerService.class) + ); + + assertTrue(action.handleDownloadInProgress(modelId, true,
ActionListener.noop())); + verify(taskManager).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + // With wait for completion == false no new removed listener will be added + assertTrue(action.handleDownloadInProgress(modelId, false, ActionListener.noop())); + verify(taskManager, times(1)).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + assertFalse(action.handleDownloadInProgress("no-task-for-this-one", randomBoolean(), ActionListener.noop())); + } + private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception { var esStatusException = new ElasticsearchStatusException(message, status, exception); @@ -152,7 +187,7 @@ private void assertNotificationAndOnFailure( ) throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(thrownException); var failureRef = new AtomicReference(); @@ -160,7 +195,14 @@ private void assertNotificationAndOnFailure( (AcknowledgedResponse response) -> { fail("received an acknowledged response: " + response.toString()); }, failureRef::set ); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel( + client, + () -> taskManager.unregister(task), + createRequestWithWaiting(), + uploader, + task, + listener + ); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications - the starting message and the failure diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 67c26c78a6741..716c401a9fcc8 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 3854c70b0f389..07dc1cc3c612a 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index 0d1d8d6484afc..9d157b3e7fa32 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index b43132c2daf50..c0d6913d85590 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index 5ed1c5179716f..02421d9bb3d14 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index ba4483493da1d..e0405b1749536 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.InferenceWaitForAllocation; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.inference.adaptiveallocations.ScaleFromZeroFeatureFlag; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; @@ -277,13 +276,11 @@ private void inferAgainstAllocatedModel( boolean starting = adaptiveAllocationsScalerService.maybeStartAllocation(assignment); if (starting) { message += "; starting deployment of one allocation"; - - if (ScaleFromZeroFeatureFlag.isEnabled()) { - waitForAllocation.waitForAssignment( - new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) - ); - return; - } + logger.debug(message); + waitForAllocation.waitForAssignment( + new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) + ); + return; } logger.debug(message); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 5fd70ce71cd24..f01372ca4f246 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -190,11 +190,11 @@ protected void masterOperation( () -> "[" + request.getDeploymentId() + "] creating new assignment for model [" + request.getModelId() + "] failed", e ); - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException resourceAlreadyExistsException) { e = new ElasticsearchStatusException( "Cannot start deployment [{}] because it has already been started", RestStatus.CONFLICT, - e, + resourceAlreadyExistsException, request.getDeploymentId() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index e55736cf43607..e13b1e0033191 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongArray; import 
org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Releasables; @@ -110,31 +111,33 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException { - Bucket[][] topBucketsPerOrd = new Bucket[ordsToCollect.length][]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - final long ord = ordsToCollect[ordIdx]; - final TokenListCategorizer categorizer = (ord < categorizers.size()) ? categorizers.get(ord) : null; - if (categorizer == null) { - topBucketsPerOrd[ordIdx] = new Bucket[0]; - continue; + public InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(ordsToCollect.size())) { + for (long ordIdx = 0; ordIdx < ordsToCollect.size(); ordIdx++) { + final long ord = ordsToCollect.get(ordIdx); + final TokenListCategorizer categorizer = (ord < categorizers.size()) ? categorizers.get(ord) : null; + if (categorizer == null) { + topBucketsPerOrd.set(ordIdx, new Bucket[0]); + continue; + } + int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); + checkRealMemoryCBForInternalBucket(); + topBucketsPerOrd.set(ordIdx, categorizer.toOrderedBuckets(size)); } - int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); - topBucketsPerOrd[ordIdx] = categorizer.toOrderedBuckets(size); - } - buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); - InternalAggregation[] results = new InternalAggregation[ordsToCollect.length]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - results[ordIdx] = new InternalCategorizationAggregation( - name, - bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), - similarityThreshold, - metadata(), - Arrays.asList(topBucketsPerOrd[ordIdx]) + buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); + + return buildAggregations( + Math.toIntExact(ordsToCollect.size()), + ordIdx -> new InternalCategorizationAggregation( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + similarityThreshold, + metadata(), + Arrays.asList(topBucketsPerOrd.get(ordIdx)) + ) ); } - return results; } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 7ef7a8f4e6dd5..95b6a18182f9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -142,8 +142,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.DOC_COUNT.getPreferredName(), serializableCategory.getNumMatches()); builder.field(CommonFields.KEY.getPreferredName()); @@ -152,7 +151,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params 
params) throws builder.field(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName(), serializableCategory.maxMatchingStringLen()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } BucketKey getRawKey() { @@ -280,7 +278,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java index c97166ac6fd80..39bdb69d4da40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java @@ -12,12 +12,13 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket { +public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket implements ToXContent { private final Object key; private final long docCount; private final InternalAggregations aggregations; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java index 350f45afb9e1f..1b28ebbb3eec6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java @@ -40,10 +40,12 @@ * At the time of writing circuit breakers are a global gauge.) * * After the map phase and before reduce, the {@link ItemSetMapReduceAggregator} creates instances of - * {@link InternalItemSetMapReduceAggregation}, see {@link ItemSetMapReduceAggregator#buildAggregations(long[])}. + * {@link InternalItemSetMapReduceAggregation}, see + * {@link ItemSetMapReduceAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. * * (Note 1: Instead of keeping the existing instance, it would have been possible to deep-copy the object like - * {@link CardinalityAggregator#buildAggregations(long[])}. I decided against this approach mainly because the deep-copy isn't + * {@link CardinalityAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. + * I decided against this approach mainly because the deep-copy isn't * secured by circuit breakers, meaning the node could run out of memory during the deep-copy.) 
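 * (A minimal sketch of the breaker-accounted pattern that the LongArray signature referenced above pairs
 * with, as applied to {@code CategorizeTextAggregator} elsewhere in this change; {@code bucketsForOrd} is an
 * illustrative placeholder, not an API of this class:
 *
 *   try (ObjectArray<Bucket[]> topBucketsPerOrd = bigArrays().newObjectArray(ordsToCollect.size())) {
 *       for (long ordIdx = 0; ordIdx < ordsToCollect.size(); ordIdx++) {
 *           topBucketsPerOrd.set(ordIdx, bucketsForOrd(ordsToCollect.get(ordIdx)));
 *       }
 *       // results are built here, while the allocation is still accounted against the breaker
 *   }
 *
 * The temporary per-ordinal state lives in a BigArrays-backed {@code ObjectArray}, so the allocation is
 * tracked by the request circuit breaker and released deterministically by try-with-resources instead of
 * sitting in an untracked plain Java array.)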
* (Note 2: Between {@link ItemSetMapReduceAggregator#doClose()} and serializing {@link InternalItemSetMapReduceAggregation} * memory accounting is broken, meaning the agg context gets closed and bytes get returned to the circuit breaker before memory is diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 0f9555c77341f..1a5e5d7a0790e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; @@ -117,9 +118,9 @@ public InternalAggregation buildEmptyAggregation() { } @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { results[ordIdx] = buildAggregation(ordIdx); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java deleted file mode 100644 index 4c446b65db9dd..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.adaptiveallocations; - -import org.elasticsearch.common.util.FeatureFlag; - -public class ScaleFromZeroFeatureFlag { - private ScaleFromZeroFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("ml_scale_from_zero"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index deb645ff96133..4a9d65481d412 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -981,7 +981,7 @@ private static Set countInferenceProcessors(IngestMetadata ingestMetadat return allReferencedModelKeys; } ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> { - Object processors = pipelineConfiguration.getConfigAsMap().get("processors"); + Object processors = pipelineConfiguration.getConfig().get("processors"); if (processors instanceof List) { for (Object processor : (List) processors) { if (processor instanceof Map) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index bbe5bea691c35..5dd7dbbffaa61 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -194,7 +194,7 @@ Reader normalize(CharSequence str) { if (charDelta < 0) { // normalised form is shorter int lastDiff = getLastCumulativeDiff(); - addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + addOffCorrectMap(normalizedCharPos, lastDiff - charDelta); } else if (charDelta > 0) { // inserted chars, add the offset in the output stream int lastDiff = getLastCumulativeDiff(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java index 31deac066cba2..01821f5582471 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java @@ -367,8 +367,10 @@ List tokenize(CharSequence inputSequence, IntToIntFuncti new DelimitedToken.Encoded( Strings.format("<0x%02X>", bytes[i]), pieces[i], + // even though we are changing the number of characters in the output, we don't + // need to change the offsets. 
The offsets refer to the input characters offsetCorrection.apply(node.startsAtCharPos), - offsetCorrection.apply(startsAtBytes + i) + offsetCorrection.apply(endsAtChars) ) ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 561076c302eda..1604c47ac4754 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -36,7 +36,7 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - return new InternalHistogram.Bucket(timestamp, docCount, false, DocValueFormat.RAW, createAggs(subAggregations)); + return new InternalHistogram.Bucket(timestamp, docCount, DocValueFormat.RAW, createAggs(subAggregations)); } static InternalComposite.InternalBucket createCompositeBucket( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java index bbe509da67452..a8461de8630ae 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java @@ -94,6 +94,20 @@ public void testTokenize() throws IOException { } } + public void testTokenizeWithHiddenControlCharacters() throws IOException { + try ( + DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( + TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new DebertaV2Tokenization(false, false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("\u009F\u008Fz", Tokenization.Truncate.NONE, -1, 0, null).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁", "z")); + + } + } + public void testSurrogatePair() throws IOException { try ( DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java index b1d4f3ff7045f..6822f54633bdc 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java @@ -53,8 +53,6 @@ public void setUp() throws Exception { "dcvO5uZATE-EhIKc3tk9Bg", null, null, - null, - null, new ShardStats[] { // Primaries new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null, null, false, 0), diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java index 435530542c857..ca52db9331cf3 100644 --- 
a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java +++ b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -27,10 +26,9 @@ public OTelIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java index 543102330bd08..67bd8c4e002d3 100644 --- a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java +++ b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); registry.set( - new OTelIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new OTelIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { OTelIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle index 4683c13f1fc0c..43c78bfc887b7 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle index 62fe47c08f5f5..984590f42256c 100644 --- a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle index 3c58e6a06af69..5c83e8980a474 100644 --- a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba25a774ff540..e33c1cc30f355 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -444,20 +444,14 @@ private static InternalAggregation unrollMultiBucket( long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalDateHistogram.Bucket( - key, - bucketCount, - ((InternalDateHistogram.Bucket) bucket).getKeyed(), - formatter, - subAggs - ); + return new InternalDateHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof InternalHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalHistogram.Bucket(key, bucketCount, ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); + return new InternalHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle index e2f77fae89225..de5ec42147d3f 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle index c0a420aff313a..0340453d0840b 100644 --- a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle index e8d97da9a9e37..b41e0f8dcc1cf 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle @@ -5,8 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.internal-available-ports' diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle index 430df2a7e8122..1659c592e5e64 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java new file mode 100644 index 0000000000000..3049fe830e728 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java @@ -0,0 +1,281 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.s3; + +import fixture.s3.S3HttpFixture; +import io.netty.handler.codec.http.HttpMethod; + +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.function.UnaryOperator; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.allOf; + +public class S3SearchableSnapshotsCredentialsReloadIT extends ESRestTestCase { + + private static final String BUCKET = "S3SearchableSnapshotsCredentialsReloadIT-bucket"; + private static final String BASE_PATH = "S3SearchableSnapshotsCredentialsReloadIT-base-path"; + + public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored"); + + private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .keystore(keystoreSettings) + .setting("xpack.searchable.snapshot.shared_cache.size", "4kB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "4kB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .systemProperty("es.allow_insecure_settings", "true") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void skipFips() { + assumeFalse("getting these tests to run in a FIPS JVM is kinda fiddly and we don't really need the extra coverage", inFipsJvm()); + } + + public void testReloadCredentialsFromKeystore() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.putRepository(); + + // Set up initial credentials + final String accessKey1 = randomIdentifier(); + s3Fixture.setAccessKey(accessKey1); + keystoreSettings.put("s3.client.default.access_key", accessKey1); + keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + final String accessKey2 = 
randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Set up refreshed credentials + logger.info("--> update keystore contents"); + keystoreSettings.put("s3.client.default.access_key", accessKey2); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + public void testReloadCredentialsFromAlternativeClient() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.putRepository(); + + // Set up credentials + final String accessKey1 = randomIdentifier(); + final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + final String alternativeClient = randomValueOtherThan("default", ESTestCase::randomIdentifier); + + s3Fixture.setAccessKey(accessKey1); + keystoreSettings.put("s3.client.default.access_key", accessKey1); + keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); + keystoreSettings.put("s3.client." + alternativeClient + ".access_key", accessKey2); + keystoreSettings.put("s3.client." + alternativeClient + ".secret_key", randomIdentifier()); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Adjust repository to use new client + logger.info("--> update repository metadata"); + testHarness.putRepository(b -> b.put("client", alternativeClient)); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + public void testReloadCredentialsFromMetadata() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.warningsHandler = WarningsHandler.PERMISSIVE; + + // Set up credentials + final String accessKey1 = randomIdentifier(); + final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + + testHarness.putRepository(b -> b.put("access_key", accessKey1).put("secret_key", randomIdentifier())); + s3Fixture.setAccessKey(accessKey1); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Adjust repository to use new credentials + logger.info("--> update repository metadata"); + testHarness.putRepository(b -> b.put("access_key", accessKey2).put("secret_key", randomIdentifier())); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + private class TestHarness { 
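+        // The three tests above share one flow: rotate the fixture's access key so that reads start
+        // failing with 403, refresh whichever credential source the test exercises (keystore, an
+        // alternative client, or repository metadata), and verify that searches succeed again. A
+        // minimal sketch of the keystore-based refresh they rely on, using only names that appear in
+        // this file (newAccessKey is an illustrative local variable):
+        //
+        //   keystoreSettings.put("s3.client.default.access_key", newAccessKey);
+        //   cluster.updateStoredSecureSettings();
+        //   assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings")));
+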
+ private final String mountedIndexName = randomIdentifier(); + private final String repositoryName = randomIdentifier(); + + @Nullable // to use the default + WarningsHandler warningsHandler; + + void putRepository() throws IOException { + putRepository(UnaryOperator.identity()); + } + + void putRepository(UnaryOperator settingsOperator) throws IOException { + // Register repository + final Request request = newXContentRequest( + HttpMethod.PUT, + "/_snapshot/" + repositoryName, + (b, p) -> b.field("type", "s3") + .startObject("settings") + .value( + settingsOperator.apply( + Settings.builder().put("bucket", BUCKET).put("base_path", BASE_PATH).put("endpoint", s3Fixture.getAddress()) + ).build() + ) + .endObject() + ); + request.addParameter("verify", "false"); // because we don't have access to the blob store yet + request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler)); + assertOK(client().performRequest(request)); + } + + void createFrozenSearchableSnapshotIndex() throws IOException { + // Create an index, large enough that its data is not all captured in the file headers + final String indexName = randomValueOtherThan(mountedIndexName, ESTestCase::randomIdentifier); + createIndex(indexName, indexSettings(1, 0).build()); + try (var bodyStream = new ByteArrayOutputStream()) { + for (int i = 0; i < 1024; i++) { + try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) { + bodyLineBuilder.startObject().startObject("index").endObject().endObject(); + } + bodyStream.write(0x0a); + try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) { + bodyLineBuilder.startObject().field("foo", "bar").endObject(); + } + bodyStream.write(0x0a); + } + bodyStream.flush(); + final Request request = new Request("PUT", indexName + "/_bulk"); + request.setEntity(new ByteArrayEntity(bodyStream.toByteArray(), ContentType.APPLICATION_JSON)); + client().performRequest(request); + } + + // Take a snapshot and delete the original index + final String snapshotName = randomIdentifier(); + final Request createSnapshotRequest = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repositoryName + '/' + snapshotName); + createSnapshotRequest.addParameter("wait_for_completion", "true"); + createSnapshotRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler)); + assertOK(client().performRequest(createSnapshotRequest)); + + deleteIndex(indexName); + + // Mount the snapshotted index as a searchable snapshot + final Request mountRequest = newXContentRequest( + HttpMethod.POST, + "/_snapshot/" + repositoryName + "/" + snapshotName + "/_mount", + (b, p) -> b.field("index", indexName).field("renamed_index", mountedIndexName) + ); + mountRequest.addParameter("wait_for_completion", "true"); + mountRequest.addParameter("storage", "shared_cache"); + assertOK(client().performRequest(mountRequest)); + ensureGreen(mountedIndexName); + } + + void ensureSearchSuccess() throws IOException { + final Request searchRequest = new Request("GET", mountedIndexName + "/_search"); + searchRequest.addParameter("size", "10000"); + assertEquals( + "bar", + ObjectPath.createFromResponse(assertOK(client().performRequest(searchRequest))).evaluate("hits.hits.0._source.foo") + ); + } + + void ensureSearchFailure() throws IOException { + assertOK(client().performRequest(new Request("POST", "/_searchable_snapshots/cache/clear"))); + final Request searchRequest = new Request("GET", mountedIndexName + 
"/_search"); + searchRequest.addParameter("size", "10000"); + assertThat( + expectThrows(ResponseException.class, () -> client().performRequest(searchRequest)).getMessage(), + allOf( + containsString("Bad access key"), + containsString("Status Code: 403"), + containsString("Error Code: AccessDenied"), + containsString("failed to read data from cache") + ) + ); + } + } + +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 8bb4c45e54ab3..33982536cd634 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -543,9 +543,9 @@ public Map getRecoveryStateFactories() { return Map.of(SNAPSHOT_RECOVERY_STATE_FACTORY_KEY, SearchableSnapshotRecoveryState::new); } - public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = "searchable_snapshots_cache_fetch_async"; + public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME; public static final String CACHE_FETCH_ASYNC_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_fetch_async_thread_pool"; - public static final String CACHE_PREWARMING_THREAD_POOL_NAME = "searchable_snapshots_cache_prewarming"; + public static final String CACHE_PREWARMING_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME; public static final String CACHE_PREWARMING_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_prewarming_thread_pool"; public static ScalingExecutorBuilder[] executorBuilders(Settings settings) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java new file mode 100644 index 0000000000000..335c8e311ace6 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.store; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.FilterBlobContainer; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; + +import java.io.IOException; +import java.io.InputStream; +import java.util.function.Supplier; + +public class BlobContainerSupplier implements Supplier { + + private static final Logger logger = LogManager.getLogger(BlobContainerSupplier.class); + + private final Supplier repositorySupplier; + private final IndexId indexId; + private final int shardId; + + private volatile LastKnownState lastKnownState = new LastKnownState(null, null); + + public BlobContainerSupplier(Supplier repositorySupplier, IndexId indexId, int shardId) { + this.repositorySupplier = repositorySupplier; + this.indexId = indexId; + this.shardId = shardId; + } + + @Override + public BlobContainer get() { + final LastKnownState lastKnownState = this.lastKnownState; + final BlobStoreRepository currentRepository = repositorySupplier.get(); + + if (lastKnownState.blobStoreRepository() == currentRepository) { + return lastKnownState.blobContainer(); + } else { + return refreshAndGet(); + } + } + + private synchronized BlobContainer refreshAndGet() { + final BlobStoreRepository currentRepository = repositorySupplier.get(); + if (lastKnownState.blobStoreRepository() == currentRepository) { + return lastKnownState.blobContainer(); + } else { + logger.debug("creating new blob container [{}][{}][{}]", currentRepository.getMetadata().name(), indexId, shardId); + final BlobContainer newContainer = new RateLimitingBlobContainer( + currentRepository, + currentRepository.shardContainer(indexId, shardId) + ); + lastKnownState = new LastKnownState(currentRepository, newContainer); + return newContainer; + } + } + + private record LastKnownState(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) {} + + /** + * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which + * blobs are read from the repository. 
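+ * A sketch of the resulting read path (names as used in this class): every read is funneled through
+ * the repository's restore throttle before its bytes reach the cache layer,
+ *
+ *   InputStream in = blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName));
+ *
+ * so the repository's max_restore_bytes_per_sec setting also governs searchable-snapshot reads.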
+ */ + private static class RateLimitingBlobContainer extends FilterBlobContainer { + + private final BlobStoreRepository blobStoreRepository; + + RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) { + super(blobContainer); + this.blobStoreRepository = blobStoreRepository; + } + + @Override + protected BlobContainer wrapChild(BlobContainer child) { + return new RateLimitingBlobContainer(blobStoreRepository, child); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName)); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length)); + } + } +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java new file mode 100644 index 0000000000000..63522ce2309a1 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.searchablesnapshots.store; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; + +import java.util.Map; +import java.util.Objects; +import java.util.function.Supplier; + +public class RepositorySupplier implements Supplier { + + private static final Logger logger = LogManager.getLogger(RepositorySupplier.class); + + private final RepositoriesService repositoriesService; + + private final String repositoryName; + + @Nullable // if repository specified only by name + private final String repositoryUuid; + + private volatile String repositoryNameHint; + + public RepositorySupplier(RepositoriesService repositoriesService, String repositoryName, String repositoryUuid) { + this.repositoriesService = Objects.requireNonNull(repositoriesService); + this.repositoryName = Objects.requireNonNull(repositoryName); + this.repositoryUuid = repositoryUuid; + this.repositoryNameHint = repositoryName; + } + + @Override + public BlobStoreRepository get() { + return SearchableSnapshots.getSearchableRepository(getRepository()); + } + + private Repository getRepository() { + if (repositoryUuid == null) { + // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name + final Repository repository = repositoriesService.repository(repositoryName); + assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName; + return repository; + } + + final Map repositoriesByName = 
repositoriesService.getRepositories(); + + final String currentRepositoryNameHint = repositoryNameHint; + final Repository repositoryByLastKnownName = repositoriesByName.get(currentRepositoryNameHint); + if (repositoryByLastKnownName != null) { + final var foundRepositoryUuid = repositoryByLastKnownName.getMetadata().uuid(); + if (Objects.equals(repositoryUuid, foundRepositoryUuid)) { + return repositoryByLastKnownName; + } + } + + for (final Repository repository : repositoriesByName.values()) { + if (repository.getMetadata().uuid().equals(repositoryUuid)) { + final var newRepositoryName = repository.getMetadata().name(); + logger.debug( + "getRepository: repository [{}] with uuid [{}] replacing repository [{}]", + newRepositoryName, + repositoryUuid, + currentRepositoryNameHint + ); + repositoryNameHint = repository.getMetadata().name(); + return repository; + } + } + + throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + repositoryName + "]"); + } +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java index b56cd28e9dc6c..bbdf371e1ed7b 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java @@ -24,8 +24,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; -import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.settings.Settings; @@ -43,8 +41,6 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.repositories.Repository; -import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.threadpool.ThreadPool; @@ -62,7 +58,6 @@ import java.io.FileNotFoundException; import java.io.IOException; -import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; @@ -134,7 +129,6 @@ public class SearchableSnapshotDirectory extends BaseDirectory { // volatile fields are updated once under `this` lock, all together, iff loaded is not true. 
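// (The blob container itself is deliberately no longer one of these cached fields: after this change it
// is re-resolved on each access through the BlobContainerSupplier introduced above, so a repository that
// is re-registered, for example with fresh credentials, is picked up without rebuilding the directory.
// Sketch of the access pattern, using names from this change:
//
//   BlobContainer container = blobContainerSupplier.get(); // fast path while the repository instance is
//                                                          // unchanged; otherwise a new rate-limited
//                                                          // container is created
// )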
private volatile BlobStoreIndexShardSnapshot snapshot; - private volatile BlobContainer blobContainer; private volatile boolean loaded; private volatile SearchableSnapshotRecoveryState recoveryState; @@ -182,7 +176,6 @@ public SearchableSnapshotDirectory( private synchronized boolean invariant() { assert loaded != (snapshot == null); - assert loaded != (blobContainer == null); assert loaded != (recoveryState == null); return true; } @@ -212,7 +205,6 @@ public boolean loadSnapshot( synchronized (this) { alreadyLoaded = this.loaded; if (alreadyLoaded == false) { - this.blobContainer = blobContainerSupplier.get(); this.snapshot = snapshotSupplier.get(); this.loaded = true; cleanExistingRegularShardFiles(); @@ -226,14 +218,12 @@ public boolean loadSnapshot( return alreadyLoaded == false; } - @Nullable public BlobContainer blobContainer() { - final BlobContainer blobContainer = this.blobContainer; + final BlobContainer blobContainer = blobContainerSupplier.get(); assert blobContainer != null; return blobContainer; } - @Nullable public BlobStoreIndexShardSnapshot snapshot() { final BlobStoreIndexShardSnapshot snapshot = this.snapshot; assert snapshot != null; @@ -590,23 +580,15 @@ public static Directory create( ); } - Repository repository; - final String repositoryName; - if (SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings())) { - repository = repositoryByUuid( - repositories.getRepositories(), - SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings()), - SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()) - ); - repositoryName = repository.getMetadata().name(); - } else { - // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name - repositoryName = SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()); - repository = repositories.repository(repositoryName); - assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName; - } + final Supplier repositorySupplier = new RepositorySupplier( + repositories, + SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()), + SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings()) + ? 
SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings()) + : null + ); - final BlobStoreRepository blobStoreRepository = SearchableSnapshots.getSearchableRepository(repository); + final BlobStoreRepository initialRepository = repositorySupplier.get(); final IndexId indexId = new IndexId( SNAPSHOT_INDEX_NAME_SETTING.get(indexSettings.getSettings()), @@ -617,14 +599,14 @@ public static Directory create( SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings.getSettings()) ); - final LazyInitializable lazyBlobContainer = new LazyInitializable<>( - () -> new RateLimitingBlobContainer( - blobStoreRepository, - blobStoreRepository.shardContainer(indexId, shardPath.getShardId().id()) - ) + final Supplier blobContainerSupplier = new BlobContainerSupplier( + repositorySupplier, + indexId, + shardPath.getShardId().id() ); + final LazyInitializable lazySnapshot = new LazyInitializable<>( - () -> blobStoreRepository.loadShardSnapshot(lazyBlobContainer.getOrCompute(), snapshotId) + () -> repositorySupplier.get().loadShardSnapshot(blobContainerSupplier.get(), snapshotId) ); final Path cacheDir = CacheService.getShardCachePath(shardPath).resolve(snapshotId.getUUID()); @@ -632,10 +614,10 @@ public static Directory create( return new InMemoryNoOpCommitDirectory( new SearchableSnapshotDirectory( - lazyBlobContainer::getOrCompute, + blobContainerSupplier, lazySnapshot::getOrCompute, blobStoreCacheService, - repositoryName, + initialRepository.getMetadata().name(), snapshotId, indexId, shardPath.getShardId(), @@ -690,42 +672,4 @@ public void putCachedBlob(String name, ByteRange range, BytesReference content, public SharedBlobCacheService.CacheFile getFrozenCacheFile(String fileName, long length) { return sharedBlobCacheService.getCacheFile(createCacheKey(fileName), length); } - - private static Repository repositoryByUuid(Map repositories, String repositoryUuid, String originalName) { - for (Repository repository : repositories.values()) { - if (repository.getMetadata().uuid().equals(repositoryUuid)) { - return repository; - } - } - throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + originalName + "]"); - } - - /** - * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which - * blobs are read from the repository. - */ - private static class RateLimitingBlobContainer extends FilterBlobContainer { - - private final BlobStoreRepository blobStoreRepository; - - RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) { - super(blobContainer); - this.blobStoreRepository = blobStoreRepository; - } - - @Override - protected BlobContainer wrapChild(BlobContainer child) { - return new RateLimitingBlobContainer(blobStoreRepository, child); - } - - @Override - public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName)); - } - - @Override - public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length)); - } - } } diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 8fd3dd29f87a4..d450a38dd1d29 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle index a6930d38d41e5..72deed1af72dd 100644 --- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle +++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/security/qa/jwt-realm/build.gradle b/x-pack/plugin/security/qa/jwt-realm/build.gradle index bc7178f11d9fc..1f7b7c1038fad 100644 --- a/x-pack/plugin/security/qa/jwt-realm/build.gradle +++ b/x-pack/plugin/security/qa/jwt-realm/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/multi-cluster/build.gradle b/x-pack/plugin/security/qa/multi-cluster/build.gradle index 8ee449d39dcce..5b682cfdccade 100644 --- a/x-pack/plugin/security/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/security/qa/multi-cluster/build.gradle @@ -5,7 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java index 0f39104511be0..8bccc2e3c5c23 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java @@ -76,7 +76,7 @@ public class CrossClusterEsqlRCS1MissingIndicesIT extends AbstractRemoteClusterS record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} @SuppressWarnings("unchecked") - public void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { Map clusters = (Map) responseMap.get("_clusters"); assertThat((int) responseMap.get("took"), greaterThan(0)); @@ -220,7 +220,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throw ); } - // since at least one index of the query matches on some cluster, a wildcarded index on skip_un=true is not an error + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error { String q = Strings.format("FROM %s,%s:nomatch*", INDEX1, REMOTE_CLUSTER_ALIAS); @@ -358,7 +358,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString("Unknown index [nomatch]")); + assertThat(e.getMessage(), containsString("Unknown index [nomatch]")); } // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) @@ -371,7 +371,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); + assertThat(e.getMessage(), containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); } // since there is at least one matching index in the query, the missing wildcarded local index is not an error diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java new file mode 100644 index 0000000000000..52cd0655fbfdf --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java @@ -0,0 +1,316 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.remotecluster;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.util.resource.Resource;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.rules.RuleChain;
+import org.junit.rules.TestRule;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+
+public class CrossClusterEsqlRCS2UnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase {
+    private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>();
+
+    static {
+        fulfillingCluster = ElasticsearchCluster.local()
+            .name("fulfilling-cluster")
+            .nodes(1)
+            .module("x-pack-esql")
+            .apply(commonClusterConfig)
+            .setting("remote_cluster.port", "0")
+            .setting("xpack.ml.enabled", "false")
+            .setting("xpack.security.remote_cluster_server.ssl.enabled", "true")
+            .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key")
+            .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt")
+            .setting("xpack.security.authc.token.enabled", "true")
+            .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password")
+            .node(0, spec -> spec.setting("remote_cluster_server.enabled", "true"))
+            .build();
+
+        queryCluster = ElasticsearchCluster.local()
+            .name("query-cluster")
+            .module("x-pack-esql")
+            .apply(commonClusterConfig)
+            .setting("xpack.ml.enabled", "false")
+            .setting("xpack.security.remote_cluster_client.ssl.enabled", "true")
+            .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt")
+            .setting("xpack.security.authc.token.enabled", "true")
+            .keystore("cluster.remote.my_remote_cluster.credentials", () -> {
+                if (API_KEY_MAP_REF.get() == null) {
+                    final Map<String, Object> apiKeyMap = createCrossClusterAccessApiKey("""
+                        {
+                            "search": [
+                                {
+                                    "names": ["*"]
+                                }
+                            ]
+                        }""");
+                    API_KEY_MAP_REF.set(apiKeyMap);
+                }
+                return (String) API_KEY_MAP_REF.get().get("encoded");
+            })
+            .rolesFile(Resource.fromClasspath("roles.yml"))
+            .user(REMOTE_METRIC_USER, PASS.toString(), "read_remote_shared_metrics", false)
+            .build();
+    }
+
+    @ClassRule
+    public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster);
+
+    @Before
+    public void setupPreRequisites() throws Exception {
+        setupRolesAndPrivileges();
+        loadData();
+    }
+
+    public void testEsqlRcs2UnavailableRemoteScenarios() throws Exception {
+        clusterShutDownWithRandomSkipUnavailable();
+        remoteClusterShutdownWithSkipUnavailableTrue();
+        remoteClusterShutdownWithSkipUnavailableFalse();
+    }
+
+    private void clusterShutDownWithRandomSkipUnavailable() throws Exception {
+        // skip_unavailable is set to a random boolean value.
+        // However, no clusters are stopped. Hence, we do not expect any behaviour
+        // other than a 200 OK.
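+        // For reference: skip_unavailable is a dynamic, per-remote cluster setting.
+        // configureRemoteCluster(...) below wires it through the test fixture; outside
+        // the fixture, an equivalent toggle would look like this (sketch only, alias
+        // assumed to match this test):
+        //
+        //   PUT _cluster/settings
+        //   { "persistent": { "cluster.remote.my_remote_cluster.skip_unavailable": true } }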
+
+        configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), randomBoolean());
+        String query = "FROM *,my_remote_cluster:* | LIMIT 10";
+        Response response = performRequestWithRemoteSearchUser(esqlRequest(query));
+
+        Map map = responseAsMap(response);
+        ArrayList columns = (ArrayList) map.get("columns");
+        ArrayList values = (ArrayList) map.get("values");
+        Map clusters = (Map) map.get("_clusters");
+        Map clusterDetails = (Map) clusters.get("details");
+        Map localClusterDetails = (Map) clusterDetails.get("(local)");
+        Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster");
+
+        assertOK(response);
+        assertThat((int) map.get("took"), greaterThan(0));
+        assertThat(columns.size(), is(4));
+        assertThat(values.size(), is(9));
+
+        assertThat((int) clusters.get("total"), is(2));
+        assertThat((int) clusters.get("successful"), is(2));
+        assertThat((int) clusters.get("running"), is(0));
+        assertThat((int) clusters.get("skipped"), is(0));
+        assertThat((int) clusters.get("partial"), is(0));
+        assertThat((int) clusters.get("failed"), is(0));
+
+        assertThat(clusterDetails.size(), is(2));
+        assertThat((int) localClusterDetails.get("took"), greaterThan(0));
+        assertThat(localClusterDetails.get("status"), is("successful"));
+
+        assertThat((int) remoteClusterDetails.get("took"), greaterThan(0));
+        assertThat(remoteClusterDetails.get("status"), is("successful"));
+    }
+
+    @SuppressWarnings("unchecked")
+    private void remoteClusterShutdownWithSkipUnavailableTrue() throws Exception {
+        // Remote cluster is stopped and skip_unavailable is set to true.
+        // We expect no exception and partial results from the remaining open cluster.
+
+        configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), true);
+
+        try {
+            // Stop remote cluster.
+            fulfillingCluster.stop(true);
+
+            // A simple query that targets our remote cluster.
+            String query = "FROM *,my_remote_cluster:* | LIMIT 10";
+            Response response = performRequestWithRemoteSearchUser(esqlRequest(query));
+
+            Map map = responseAsMap(response);
+            ArrayList columns = (ArrayList) map.get("columns");
+            ArrayList values = (ArrayList) map.get("values");
+            Map clusters = (Map) map.get("_clusters");
+            Map clusterDetails = (Map) clusters.get("details");
+            Map localClusterDetails = (Map) clusterDetails.get("(local)");
+            Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster");
+
+            // Assert results obtained from the local cluster and that the remote
+            // cluster was skipped.
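+            // Shape of the _clusters section asserted below (values are illustrative,
+            // not taken from a real run):
+            //
+            //   "_clusters": {
+            //     "total": 2, "successful": 1, "skipped": 1,
+            //     "running": 0, "partial": 0, "failed": 0,
+            //     "details": {
+            //       "(local)": { "status": "successful", "took": 15, ... },
+            //       "my_remote_cluster": {
+            //         "status": "skipped",
+            //         "failures": [ { "reason": {
+            //           "type": "connect_transport_exception",
+            //           "reason": "Unable to connect to [my_remote_cluster]" } } ]
+            //       }
+            //     }
+            //   }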
+            assertOK(response);
+            assertThat((int) map.get("took"), greaterThan(0));
+
+            assertThat(columns.size(), is(2));
+            assertThat(values.size(), is(5));
+
+            assertThat((int) clusters.get("total"), is(2));
+            assertThat((int) clusters.get("successful"), is(1));
+            assertThat((int) clusters.get("skipped"), is(1));
+            assertThat((int) clusters.get("running"), is(0));
+            assertThat((int) clusters.get("partial"), is(0));
+            assertThat((int) clusters.get("failed"), is(0));
+
+            assertThat(clusterDetails.size(), is(2));
+            assertThat((int) localClusterDetails.get("took"), greaterThan(0));
+            assertThat(localClusterDetails.get("status"), is("successful"));
+
+            assertThat((int) remoteClusterDetails.get("took"), greaterThan(0));
+            assertThat(remoteClusterDetails.get("status"), is("skipped"));
+
+            ArrayList remoteClusterFailures = (ArrayList) remoteClusterDetails.get("failures");
+            assertThat(remoteClusterFailures.size(), equalTo(1));
+            Map failuresMap = (Map) remoteClusterFailures.get(0);
+
+            Map reason = (Map) failuresMap.get("reason");
+            assertThat(reason.get("type").toString(), equalTo("connect_transport_exception"));
+            assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]"));
+        } finally {
+            fulfillingCluster.start();
+            closeFulfillingClusterClient();
+            initFulfillingClusterClient();
+        }
+    }
+
+    private void remoteClusterShutdownWithSkipUnavailableFalse() throws Exception {
+        // Remote cluster is stopped and skip_unavailable is set to false.
+        // Although the local cluster remains available, we expect an exception.
+
+        configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), false);
+
+        try {
+            // Stop remote cluster.
+            fulfillingCluster.stop(true);
+
+            // A simple query that targets our remote cluster.
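+            // With skip_unavailable=false, an unreachable remote is treated as fatal:
+            // rather than returning partial results, the request below is expected to
+            // fail with a connect_transport_exception (asserted just after).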
+ String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); + assertThat(ex.getMessage(), containsString("connect_transport_exception")); + } finally { + fulfillingCluster.start(); + closeFulfillingClusterClient(); + initFulfillingClusterClient(); + } + } + + private void setupRolesAndPrivileges() throws IOException { + var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + + var putRoleOnRemoteClusterRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleOnRemoteClusterRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"] + } + ], + "remote_indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"], + "clusters": ["*"] + } + ] + }"""); + assertOK(adminClient().performRequest(putRoleOnRemoteClusterRequest)); + } + + private void loadData() throws IOException { + Request createIndex = new Request("PUT", "task"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "id": { "type": "integer" }, + "time_taken_millis": { "type": "integer" } + } + } + } + """); + assertOK(client().performRequest(createIndex)); + + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": { "_index": "task" } } + { "id": 1, "time_taken_millis": 39} + { "index": { "_index": "task" } } + { "id": 2, "time_taken_millis": 25} + { "index": { "_index": "task" } } + { "id": 3, "time_taken_millis": 42} + { "index": { "_index": "task" } } + { "id": 4, "time_taken_millis": 16} + { "index": { "_index": "task" } } + { "id": 5, "time_taken_millis": 62} + """); + assertOK(client().performRequest(bulkRequest)); + + createIndex = new Request("PUT", "hits"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "endpoint_id": { "type": "integer" }, + "t_hits": { "type": "integer" } + } + } + } + """); + assertOK(performRequestAgainstFulfillingCluster(createIndex)); + + bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": {"_index": "hits"}} + { "endpoint_id": 1, "t_hits": 1267 } + { "index": {"_index": "hits"}} + { "endpoint_id": 2, "t_hits": 1389 } + { "index": {"_index": "hits"}} + { "endpoint_id": 3, "t_hits": 1922 } + { "index": {"_index": "hits"}} + { "endpoint_id": 4, "t_hits": 1547 } + """); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + + private Request esqlRequest(String query) throws IOException { + XContentBuilder body = JsonXContent.contentBuilder(); + + body.startObject(); + body.field("query", query); + body.field("include_ccs_metadata", true); + body.endObject(); + + Request request = new Request("POST", "_query"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); + + return request; + } +} diff --git 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 74ef6f0dafe63..09449f81121fd 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; @@ -31,6 +32,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.Map; @@ -43,9 +45,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); @@ -347,7 +352,7 @@ public void testCrossClusterQuery() throws Exception { assertRemoteOnlyResults(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertRemoteOnlyResults(response); // query remote and local cluster @@ -704,7 +709,7 @@ public void testCrossClusterEnrich() throws Exception { assertWithEnrich(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertWithEnrich(response); // Query cluster @@ -968,6 +973,462 @@ public void testAlias() throws Exception { removeAliases(); } + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), true); + populateData(); + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM 
employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 9 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = responseAsMap(resp); + assertThat(((ArrayList) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((ArrayList) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + CheckedBiConsumer verifier = new CheckedBiConsumer() { + @Override + public void accept(Response response, Boolean limit0) throws Exception { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 
0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch", "skipped", 0) + ) + ); + } + }; + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 
0 : null), + new ExpectedCluster("my_remote_cluster", "employees_nomatch*", "skipped", 0) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true and the + // index was wildcarded + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // missing concrete index on skip_unavailable=true cluster is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + CheckedBiConsumer verifier 
= (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + final List expectedClusters = List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch,employees*", "successful", 0) + ); + assertExpectedClustersForMissingIndicesTests(map, expectedClusters); + }; + + // TODO: uncomment in follow on PR handling skip_unavailable errors at execution time + // Request limit1 = esqlRequest(q + " | LIMIT 1"); + // verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + // verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + } + + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), false); + populateData(); + + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 5 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = responseAsMap(resp); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + final Response response = performRequestWithRemoteSearchUser(request); + assertOK(response); + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> 
performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is fatal error when skip_unavailable=false + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 
0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // query is fatal since the remote cluster has skip_unavailable=false and has no matching indices + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + } + + // an error is thrown if there are no matching indices at all + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), 
containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // error since the remote cluster with skip_unavailable=false specified a concrete index that is not found + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + /* Example error: + *{"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated by + * API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated + * by API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + assertThat(e.getMessage(), containsString("security_exception")); + + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + /* Example error: + * {"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated by + * API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated + * by API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for API key id")); + assertThat(e.getMessage(), containsString("of user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + assertThat(e.getMessage(), containsString("security_exception")); + + // TODO: in follow on PR, add support for throwing a VerificationException for this scenario - no exception is currently thrown + // Request limit0 = esqlRequest(q + " | 
LIMIT 0"); + // e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + // assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + } + protected Request esqlRequest(String command) throws IOException { XContentBuilder body = JsonXContent.contentBuilder(); body.startObject(); @@ -1007,7 +1468,12 @@ private Response performRequestWithRemoteSearchUser(final Request request) throw return client().performRequest(request); } - private Response performRequestWithRemoteSearchUserViaAPIKey(final Request request) throws IOException { + private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException { + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encodedApiKey)); + return client().performRequest(request); + } + + private String createRemoteSearchUserAPIKey() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" { @@ -1021,8 +1487,7 @@ private Response performRequestWithRemoteSearchUserViaAPIKey(final Request reque assertOK(response); final Map responseAsMap = responseAsMap(response); final String encoded = (String) responseAsMap.get("encoded"); - request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encoded)); - return client().performRequest(request); + return encoded; } @SuppressWarnings("unchecked") @@ -1145,4 +1610,54 @@ private void assertWithEnrich(Response response) throws IOException { assertThat(flatList, containsInAnyOrder(2, 3, "usa", "canada")); } + record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} + + @SuppressWarnings("unchecked") + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + Map clusters = (Map) responseMap.get("_clusters"); + assertThat((int) responseMap.get("took"), greaterThan(0)); + + Map detailsMap = (Map) clusters.get("details"); + assertThat(detailsMap.size(), is(expected.size())); + + assertThat((int) clusters.get("total"), is(expected.size())); + assertThat((int) clusters.get("successful"), is((int) expected.stream().filter(ec -> ec.status().equals("successful")).count())); + assertThat((int) clusters.get("skipped"), is((int) expected.stream().filter(ec -> ec.status().equals("skipped")).count())); + assertThat((int) clusters.get("failed"), is((int) expected.stream().filter(ec -> ec.status().equals("failed")).count())); + + for (ExpectedCluster expectedCluster : expected) { + Map clusterDetails = (Map) detailsMap.get(expectedCluster.clusterAlias()); + String msg = expectedCluster.clusterAlias(); + + assertThat(msg, (int) clusterDetails.get("took"), greaterThan(0)); + assertThat(msg, clusterDetails.get("status"), is(expectedCluster.status())); + Map shards = (Map) clusterDetails.get("_shards"); + if (expectedCluster.totalShards() == null) { + assertThat(msg, (int) shards.get("total"), greaterThan(0)); + } else { + assertThat(msg, (int) shards.get("total"), is(expectedCluster.totalShards())); + } + + if (expectedCluster.status().equals("successful")) { + assertThat((int) shards.get("successful"), is((int) shards.get("total"))); + assertThat((int) shards.get("skipped"), is(0)); + + } else if (expectedCluster.status().equals("skipped")) { + assertThat((int) shards.get("successful"), is(0)); + assertThat((int) shards.get("skipped"), is((int) shards.get("total"))); + ArrayList 
failures = (ArrayList) clusterDetails.get("failures"); + assertThat(failures.size(), is(1)); + Map failure1 = (Map) failures.get(0); + Map innerReason = (Map) failure1.get("reason"); + String expectedMsg = "Unknown index [" + expectedCluster.indexExpression() + "]"; + assertThat(innerReason.get("reason").toString(), containsString(expectedMsg)); + assertThat(innerReason.get("type").toString(), containsString("verification_exception")); + + } else { + fail(msg + "; Unexpected status: " + expectedCluster.status()); + } + // currently failed shards is always zero - change this once we start allowing partial data for individual shard failures + assertThat((int) shards.get("failed"), is(0)); + } + } } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java index 42982e6183613..fb941e9e815cf 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.nullValue; // account for slow stored secure settings updates (involves removing and re-creating the keystore) -@TimeoutSuite(millis = 10 * TimeUnits.MINUTE) +@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) public class RemoteClusterSecurityReloadCredentialsRestIT extends AbstractRemoteClusterSecurityTestCase { private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index df97c489cc6b7..bfff63442281d 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -358,6 +358,7 @@ public class Constants { "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/features", "cluster:monitor/nodes/hot_threads", + "cluster:monitor/nodes/index_mode_stats", "cluster:monitor/nodes/info", "cluster:monitor/nodes/stats", "cluster:monitor/nodes/usage", @@ -399,6 +400,7 @@ public class Constants { "cluster:monitor/xpack/info/frozen_indices", "cluster:monitor/xpack/info/graph", "cluster:monitor/xpack/info/ilm", + "cluster:monitor/xpack/info/logsdb", "cluster:monitor/xpack/info/logstash", "cluster:monitor/xpack/info/ml", "cluster:monitor/xpack/info/monitoring", @@ -463,6 +465,7 @@ public class Constants { "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", "cluster:monitor/xpack/usage/inference", + "cluster:monitor/xpack/usage/logsdb", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", @@ -488,6 +491,7 @@ public class Constants { "indices:admin/block/add[s]", "indices:admin/cache/clear", "indices:admin/data_stream/lazy_rollover", + "indices:admin/data_stream/reindex", 
"indices:internal/admin/ccr/restore/file_chunk/get", "indices:internal/admin/ccr/restore/session/clear", "indices:internal/admin/ccr/restore/session/put", diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle index 7465ef9917258..b0a1927ab9dfe 100644 --- a/x-pack/plugin/security/qa/profile/build.gradle +++ b/x-pack/plugin/security/qa/profile/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle index 30751705bd75f..8740354646346 100644 --- a/x-pack/plugin/security/qa/security-basic/build.gradle +++ b/x-pack/plugin/security/qa/security-basic/build.gradle @@ -1,8 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' -import org.elasticsearch.gradle.internal.info.BuildParams - dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/security/qa/security-disabled/build.gradle b/x-pack/plugin/security/qa/security-disabled/build.gradle index 0a05eae479d33..6fa100f392b9a 100644 --- a/x-pack/plugin/security/qa/security-disabled/build.gradle +++ b/x-pack/plugin/security/qa/security-disabled/build.gradle @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + /* * This QA project tests the security plugin when security is explicitly disabled. * It is intended to cover security functionality which is supposed to @@ -5,7 +12,6 @@ * For example: If a cluster has a pipeline with the set_security_user processor * defined, it should be not fail */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle index e3b51bde45cc8..c0df6a4f27f58 100644 --- a/x-pack/plugin/security/qa/tls-basic/build.gradle +++ b/x-pack/plugin/security/qa/tls-basic/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java index bc01b0693af0a..2851af1461012 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java @@ -107,7 +107,7 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { if (inFipsJvm()) { Throwable t = ExceptionsHelper.unwrap(e, CertificateException.class); assertThat(t, instanceOf(CertificateException.class)); - assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + assertThat(t.getMessage(), containsString("Unable to construct a valid chain")); } else { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 0b387a738a2c5..ef66392a87260 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1048,8 +1048,6 @@ Collection createComponents( getClock(), client, systemIndices.getProfileIndexManager(), - clusterService, - featureService, realms ); components.add(profileService); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java index d0292f32cd75f..53ecafa280715 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java @@ -7,18 +7,14 @@ package org.elasticsearch.xpack.security; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLE_MAPPING_CLEANUP; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.VERSION_SECURITY_PROFILE_ORIGIN; public class SecurityFeatures implements FeatureSpecification { @@ -26,9 +22,4 @@ public class SecurityFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(SECURITY_ROLE_MAPPING_CLEANUP, SECURITY_ROLES_METADATA_FLATTENED, SECURITY_MIGRATION_FRAMEWORK); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(SECURITY_PROFILE_ORIGIN_FEATURE, VERSION_SECURITY_PROFILE_ORIGIN); - } } diff --git 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java
index 13e3e40887d89..429b632cdac18 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java
@@ -27,10 +27,11 @@ public class AuditUtil {
 
     public static String restRequestContent(RestRequest request) {
         if (request.hasContent()) {
+            var content = request.releasableContent();
             try {
-                return XContentHelper.convertToJson(request.content(), false, false, request.getXContentType());
+                return XContentHelper.convertToJson(content, false, false, request.getXContentType());
             } catch (IOException ioe) {
-                return "Invalid Format: " + request.content().utf8ToString();
+                return "Invalid Format: " + content.utf8ToString();
            }
        }
        return "";
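The AuditUtil hunk above replaces two request.content() calls with a single releasableContent() fetch, so the audit trail reads the request body once and reuses that reference for the JSON conversion and the fallback. A minimal compilable sketch of the resulting shape, using stand-in Request/Body interfaces rather than the real RestRequest and bytes types (convertToJson here stands in for XContentHelper.convertToJson):

import java.io.IOException;

final class AuditContentSketch {
    interface Body {
        String utf8ToString();
    }

    interface Request {
        boolean hasContent();
        Body releasableContent();
        String convertToJson(Body body) throws IOException;
    }

    // Mirrors the control flow after the change: the body is fetched once and
    // the same reference backs both the conversion and the fallback string.
    static String restRequestContent(Request request) {
        if (request.hasContent()) {
            Body content = request.releasableContent();
            try {
                return request.convertToJson(content);
            } catch (IOException e) {
                return "Invalid Format: " + content.utf8ToString();
            }
        }
        return "";
    }
}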
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
index b347c278aae08..a3ee313c7f1d9 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.BackoffPolicy;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -45,7 +44,6 @@
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder;
@@ -100,14 +98,12 @@
 import static org.elasticsearch.action.bulk.TransportSingleItemBulkWriteAction.toSingleItemBulkRequest;
 import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString;
 import static org.elasticsearch.core.Strings.format;
-import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
 import static org.elasticsearch.xpack.core.security.authc.Authentication.isFileOrNativeRealm;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS;
 import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS;
-import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE;
 
 public class ProfileService {
     private static final Logger logger = LogManager.getLogger(ProfileService.class);
@@ -120,26 +116,14 @@ public class ProfileService {
     private final Clock clock;
     private final Client client;
     private final SecurityIndexManager profileIndex;
-    private final ClusterService clusterService;
-    private final FeatureService featureService;
     private final Function domainConfigLookup;
     private final Function realmRefLookup;
 
-    public ProfileService(
-        Settings settings,
-        Clock clock,
-        Client client,
-        SecurityIndexManager profileIndex,
-        ClusterService clusterService,
-        FeatureService featureService,
-        Realms realms
-    ) {
+    public ProfileService(Settings settings, Clock clock, Client client, SecurityIndexManager profileIndex, Realms realms) {
         this.settings = settings;
         this.clock = clock;
         this.client = client;
         this.profileIndex = profileIndex;
-        this.clusterService = clusterService;
-        this.featureService = featureService;
         this.domainConfigLookup = realms::getDomainConfig;
         this.realmRefLookup = realms::getRealmRef;
     }
@@ -273,7 +257,7 @@ public void suggestProfile(SuggestProfilesRequest request, TaskId parentTaskId,
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportSearchAction.TYPE,
                 searchRequest,
                 ActionListener.wrap(searchResponse -> {
@@ -403,7 +387,7 @@ public void usageStats(ActionListener<Map<String, Object>> listener) {
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportMultiSearchAction.TYPE,
                 multiSearchRequest,
                 ActionListener.wrap(multiSearchResponse -> {
@@ -484,7 +468,7 @@ private void getVersionedDocument(String uid, ActionListener
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportGetAction.TYPE,
                 getRequest,
                 ActionListener.wrap(response -> {
@@ -514,7 +498,7 @@ private void getVersionedDocuments(Collection uids, ActionListener
         frozenProfileIndex.checkIndexVersionThenExecute(
             listener::onFailure,
-            () -> new OriginSettingClient(client, getActionOrigin()).prepareMultiGet()
+            () -> new OriginSettingClient(client, SECURITY_PROFILE_ORIGIN).prepareMultiGet()
                 .addIds(frozenProfileIndex.aliasName(), uids.stream().map(ProfileService::uidToDocId).toArray(String[]::new))
                 .execute(ActionListener.wrap(multiGetResponse -> {
                     List retrievedDocs = new ArrayList<>(multiGetResponse.getResponses().length);
@@ -589,7 +573,7 @@ private void searchVersionedDocumentsForSubjects(
         subjects.forEach(subject -> multiSearchRequest.add(buildSearchRequestForSubject(subject)));
         executeAsyncWithOrigin(
             client,
-            getActionOrigin(),
+            SECURITY_PROFILE_ORIGIN,
             TransportMultiSearchAction.TYPE,
             multiSearchRequest,
             ActionListener.wrap(
@@ -742,7 +726,7 @@ void createNewProfile(Subject subject, String uid, ActionListener liste
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportBulkAction.TYPE,
                 bulkRequest,
                 TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> {
@@ -1007,7 +991,7 @@ void doUpdate(UpdateRequest updateRequest, ActionListener listen
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportUpdateAction.TYPE,
                 updateRequest,
                 ActionListener.wrap(updateResponse -> {
@@ -1019,15 +1003,6 @@ void doUpdate(UpdateRequest updateRequest, ActionListener listen
         );
     }
 
-    private String getActionOrigin() {
-        // profile origin and user is not available before v8.3.0
-        if (featureService.clusterHasFeature(clusterService.state(), SECURITY_PROFILE_ORIGIN_FEATURE)) {
-            return SECURITY_PROFILE_ORIGIN;
-        } else {
-            return SECURITY_ORIGIN;
-        }
-    }
-
     private static String uidToDocId(String uid) {
         return DOC_ID_PREFIX + uid;
    }
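With the feature check gone, every profile-index call in the diff above runs under SECURITY_PROFILE_ORIGIN unconditionally and the constructor no longer needs ClusterService or FeatureService. A hedged sketch of the method that disappears (the constants below are stand-in strings, not the real ClientHelper values):

import java.util.function.BooleanSupplier;

final class ActionOriginSketch {
    static final String SECURITY_ORIGIN = "security";
    static final String SECURITY_PROFILE_ORIGIN = "security_profile";

    // The removed pattern: choose an origin per call from a cluster-wide check.
    static String actionOrigin(BooleanSupplier clusterHasProfileOriginFeature) {
        return clusterHasProfileOriginFeature.getAsBoolean() ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN;
    }

    public static void main(String[] args) {
        // On clusters where the check can only return true, the method is a
        // constant, which is why the diff inlines SECURITY_PROFILE_ORIGIN.
        System.out.println(actionOrigin(() -> true));
    }
}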
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java
index f0405e42f1f22..df21f5d4eeb0b 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java
@@ -75,7 +75,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie
             return innerPrepareRequest(request, client);
         } else {
             request.params().keySet().forEach(key -> request.param(key, ""));
-            request.content();
+            request.releasableContent(); // mark content consumed
             return channel -> channel.sendResponse(new RestResponse(channel, failedFeature));
         }
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
index 609e6696bcb0f..7b3f6a8d2ae55 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
@@ -9,7 +9,6 @@
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.Version;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -57,8 +56,6 @@ public class SecuritySystemIndices {
     public static final String INTERNAL_SECURITY_PROFILE_INDEX_8 = ".security-profile-8";
     public static final String SECURITY_PROFILE_ALIAS = ".security-profile";
 
-    public static final Version VERSION_SECURITY_PROFILE_ORIGIN = Version.V_8_3_0;
-    public static final NodeFeature SECURITY_PROFILE_ORIGIN_FEATURE = new NodeFeature("security.security_profile_origin");
     public static final NodeFeature SECURITY_MIGRATION_FRAMEWORK = new NodeFeature("security.migration_framework");
     public static final NodeFeature SECURITY_ROLES_METADATA_FLATTENED = new NodeFeature("security.roles_metadata_flattened");
     public static final NodeFeature SECURITY_ROLE_MAPPING_CLEANUP = new NodeFeature("security.role_mapping_cleanup");
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java
index 5adc1e351931d..3be40c280874d 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java
@@ -2614,7 +2614,7 @@ public void testAuthenticationSuccessRest() throws Exception {
         checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString());
         checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId);
         checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri");
-        if (includeRequestBody && Strings.hasLength(request.content())) {
+        if (includeRequestBody && request.hasContent()) {
             checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString());
         }
         if (params.isEmpty() == false) {
@@ -2643,8 +2643,8 @@ public void testAuthenticationSuccessRest() throws Exception {
         checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString());
         checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId);
         checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri");
-        if (includeRequestBody && Strings.hasLength(request.content())) {
-            checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.getHttpRequest().body().asFull().bytes().utf8ToString());
+        if (includeRequestBody && request.hasContent()) {
+            checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString());
         }
         if (params.isEmpty() == false) {
             checkedFields.put(LoggingAuditTrail.URL_QUERY_FIELD_NAME, "foo=bar&evac=true");
@@ -2672,7 +2672,7 @@ public void testAuthenticationSuccessRest() throws Exception {
         checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString());
         checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId);
         checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri");
-        if (includeRequestBody && Strings.hasLength(request.content().utf8ToString())) {
+        if (includeRequestBody && request.hasContent()) {
             checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString());
         }
         if (params.isEmpty() == false) {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
index f076dc24e5d5b..6da1ddb61f11f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
@@ -36,9 +36,6 @@
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -48,7 +45,6 @@
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.query.BoolQueryBuilder;
@@ -88,7 +84,6 @@
 import org.elasticsearch.xpack.security.authc.Realms;
 import org.elasticsearch.xpack.security.profile.ProfileDocument.ProfileDocumentUser;
 import org.elasticsearch.xpack.security.support.SecurityIndexManager;
-import org.elasticsearch.xpack.security.support.SecuritySystemIndices;
 import org.elasticsearch.xpack.security.test.SecurityMocks;
 import org.hamcrest.Matchers;
 import org.junit.After;
@@ -115,7 +110,6 @@
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.util.concurrent.ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME;
 import static org.elasticsearch.test.ActionListenerUtils.anyActionListener;
-import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN;
 import static org.elasticsearch.xpack.core.security.support.Validation.VALID_NAME_CHARS;
 import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME;
@@ -187,7 +181,6 @@ public class ProfileServiceTests extends ESTestCase {
     private SecurityIndexManager profileIndex;
     private ProfileService profileService;
     Function realmRefLookup;
-    private boolean useProfileOrigin;
 
     @Before
     public void prepare() {
@@ -208,29 +201,11 @@ public void prepare() {
         when(client.threadPool()).thenReturn(threadPool);
         when(client.prepareSearch(SECURITY_PROFILE_ALIAS)).thenReturn(new SearchRequestBuilder(client).setIndices(SECURITY_PROFILE_ALIAS));
         this.profileIndex = SecurityMocks.mockSecurityIndexManager(SECURITY_PROFILE_ALIAS);
-        final ClusterService clusterService = mock(ClusterService.class);
-        final ClusterState clusterState = mock(ClusterState.class);
-        when(clusterService.state()).thenReturn(clusterState);
-        final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class);
-        when(clusterState.nodes()).thenReturn(discoveryNodes);
-        useProfileOrigin = randomBoolean();
-        FeatureService featureService = mock(FeatureService.class);
-        when(featureService.clusterHasFeature(any(), eq(SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE))).thenReturn(
-            useProfileOrigin
-        );
         realmRefLookup = realmIdentifier -> null;
         Realms realms = mock(Realms.class);
         when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), false, null));
         when(realms.getRealmRef(any(RealmConfig.RealmIdentifier.class))).then(args -> realmRefLookup.apply(args.getArgument(0)));
-        this.profileService = new ProfileService(
-            Settings.EMPTY,
-            Clock.systemUTC(),
-            client,
-            profileIndex,
-            clusterService,
-            featureService,
-            realms
-        );
+        this.profileService = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms);
     }
 
     @After
@@ -331,10 +306,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception {
         final Collection allProfileUids = randomList(1, 5, () -> randomAlphaOfLength(20));
         final Collection missingProfileUids = randomSubsetOf(allProfileUids);
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             List responses = new ArrayList<>();
             for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
@@ -397,10 +369,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception {
     public void testGetProfileSubjectWithFailures() throws Exception {
         final ElasticsearchException mGetException = new ElasticsearchException("mget Exception");
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(mGetException);
             return null;
@@ -413,10 +382,7 @@ public void testGetProfileSubjectWithFailures() throws Exception {
         final Collection errorProfileUids = randomSubsetOf(allProfileUids);
         final Collection missingProfileUids = Sets.difference(Set.copyOf(allProfileUids), Set.copyOf(errorProfileUids));
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             List responses = new ArrayList<>();
             for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
@@ -504,15 +470,7 @@ public void testLiteralUsernameWillThrowOnDuplicate() throws IOException {
         final Subject subject = new Subject(AuthenticationTestHelper.randomUser(), AuthenticationTestHelper.randomRealmRef(true));
         Realms realms = mock(Realms.class);
         when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), true, "suffix"));
-        final ProfileService service = new ProfileService(
-            Settings.EMPTY,
-            Clock.systemUTC(),
-            client,
-            profileIndex,
-            mock(ClusterService.class),
-            mock(FeatureService.class),
-            realms
-        );
+        final ProfileService service = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms);
         final PlainActionFuture future = new PlainActionFuture<>();
         service.maybeIncrementDifferentiatorAndCreateNewProfile(
             subject,
@@ -593,10 +551,7 @@ public void testBuildSearchRequest() {
     public void testSecurityProfileOrigin() {
         // Activate profile
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             @SuppressWarnings("unchecked")
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             var resp = new MultiSearchResponse(
@@ -616,10 +571,7 @@ public void testSecurityProfileOrigin() {
 
         final RuntimeException expectedException = new RuntimeException("expected");
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -632,10 +584,7 @@ public void testSecurityProfileOrigin() {
 
         // Update
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -647,10 +596,7 @@ public void testSecurityProfileOrigin() {
 
         // Suggest
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -675,17 +621,7 @@ public void testActivateProfileWithDifferentUidFormats() throws IOException {
                 return new DomainConfig(domainName, Set.of(), true, "suffix");
             }
         });
-        final ProfileService service = spy(
-            new ProfileService(
-                Settings.EMPTY,
-                Clock.systemUTC(),
-                client,
-                profileIndex,
-                mock(ClusterService.class),
-                mock(FeatureService.class),
-                realms
-            )
-        );
+        final ProfileService service = spy(new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms));
 
         doAnswer(invocation -> {
             @SuppressWarnings("unchecked")
@@ -1098,10 +1034,7 @@ public void testProfileSearchForApiKeyOwnerWithoutDomain() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
                 assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class));
@@ -1153,10 +1086,7 @@ public void testProfileSearchForApiKeyOwnerWithDomain() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
                 assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class));
@@ -1218,10 +1148,7 @@ public void testProfileSearchForOwnerOfMultipleApiKeys() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 // a single search request for a single owner of multiple keys
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
@@ -1277,10 +1204,7 @@ public void testProfileSearchErrorForApiKeyOwner() {
         MultiSearchResponse multiSearchResponseWithError = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 // a single search request for a single owner of multiple keys
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 // 2 search requests for the 2 Api key owners
@@ -1402,10 +1326,7 @@ private void mockMultiGetRequest(List sampleDocumentPar
 
     private void mockMultiGetRequest(List sampleDocumentParameters, Map errors) {
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             @SuppressWarnings("unchecked")
            final ActionListener listener = (ActionListener) invocation.getArguments()[2];
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java
index 8509a6475aa71..5d4ea0f30cb15 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java
@@ -58,7 +58,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClie
             }
         };
         FakeRestRequest fakeRestRequest = new FakeRestRequest();
-        FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, securityEnabled ? 0 : 1);
+        FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), securityEnabled ? 0 : 1);
         try (var threadPool = createThreadPool()) {
             final var client = new NoOpNodeClient(threadPool);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java
index b734e602ec291..6ff05faf22d11 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java
@@ -56,7 +56,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClie
             }
         };
         final var fakeRestRequest = new FakeRestRequest();
-        final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, requiredSettingsEnabled ? 0 : 1);
+        final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), requiredSettingsEnabled ? 0 : 1);
         try (var threadPool = createThreadPool()) {
             final var client = new NoOpNodeClient(threadPool);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java
index 79dba637d53d0..9a05230d82ae6 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java
@@ -75,7 +75,7 @@ public void testCreateApiKeyApi() throws Exception {
         ).withParams(Collections.singletonMap("refresh", randomFrom("false", "true", "wait_for"))).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java
index a47855731b37a..812354986d5bc 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java
@@ -115,7 +115,7 @@ public void testLicenseEnforcement() throws Exception {
             }
         }"""), XContentType.JSON).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java
index c65634a76b532..d88a217cd0949 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java
@@ -91,7 +91,7 @@ public void testGetApiKey() throws Exception {
         final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -159,7 +159,7 @@ public void testGetApiKeyWithProfileUid() throws Exception {
         }
         final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -224,7 +224,7 @@ public void testGetApiKeyOwnedByCurrentAuthenticatedUser() throws Exception {
         final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java
index 2cb1b6a66b02b..ac472378d4874 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java
@@ -77,7 +77,7 @@ public void testInvalidateApiKey() throws Exception {
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -144,7 +144,7 @@ public void testInvalidateApiKeyOwnedByCurrentAuthenticatedUser() throws Excepti
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java
index 7005b5158e626..d5aa249b1d0f5 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java
@@ -110,7 +110,7 @@ public void testQueryParsing() throws Exception {
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -184,7 +184,7 @@ public void testAggsAndAggregationsTogether() {
             XContentType.JSON
         ).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -230,7 +230,7 @@ public void testParsingSearchParameters() throws Exception {
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -290,7 +290,7 @@ public void testQueryApiKeyWithProfileUid() throws Exception {
         }
         FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build();
         SetOnce responseSetOnce = new SetOnce<>();
-        RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
index 6c71f30243eaf..f2fe28b2a936f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
@@ -94,7 +94,7 @@ public void testLicenseEnforcement() throws Exception {
                 "metadata": {}
             }"""), XContentType.JSON).withParams(Map.of("id", randomAlphaOfLength(10))).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java
index bd665560f425f..2ac33a780313e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java
@@ -43,7 +43,7 @@ public class RestGetTokenActionTests extends ESTestCase {
     public void testListenerHandlesExceptionProperly() {
         FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -67,7 +67,7 @@ public void sendResponse(RestResponse restResponse) {
     public void testSendResponse() {
         FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -114,7 +114,7 @@ public void sendResponse(RestResponse restResponse) {
     public void testSendResponseKerberosError() {
         FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build();
         final SetOnce responseSetOnce = new SetOnce<>();
-        RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java
index 38405a2167808..4a593eeb24ac6 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java
@@ -73,7 +73,7 @@ public void testQueryParsing() throws Exception {
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
             @Override
             public void sendResponse(RestResponse restResponse) {
                 responseSetOnce.set(restResponse);
@@ -132,7 +132,7 @@ public void testParsingSearchParameters() throws Exception {
         ).build();
 
         final SetOnce responseSetOnce = new SetOnce<>();
-        final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
+        final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
            @Override
            public void sendResponse(RestResponse restResponse) {
                responseSetOnce.set(restResponse);
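The repeated hunks above change the second constructor argument of the test channels from a hard-coded true to randomBoolean(). That argument is the channel's detailed-errors flag, so each run now exercises one of the two response-rendering modes and the assertions must hold for both. A hedged standalone sketch of the testing idea (Channel and render are stand-ins, not the real AbstractRestChannel API):

import java.util.Random;

final class DetailedErrorsSketch {
    // Stand-in for the channel flag these tests now randomize.
    record Channel(boolean detailedErrorsEnabled) {
        String render(Exception e) {
            return detailedErrorsEnabled
                ? e.getClass().getSimpleName() + ": " + e.getMessage()
                : e.getMessage();
        }
    }

    public static void main(String[] args) {
        // Analogue of ESTestCase.randomBoolean(): the property under test
        // (the message is always present) must hold in either mode.
        Channel channel = new Channel(new Random().nextBoolean());
        String rendered = channel.render(new IllegalStateException("boom"));
        if (!rendered.contains("boom")) {
            throw new AssertionError(rendered);
        }
        System.out.println(rendered);
    }
}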
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
index c5c5e14934408..e381663d4174e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
@@ -571,7 +571,11 @@ public void testClientChannelUsesSeparateSslConfigurationForRemoteCluster() thro
             final ConnectTransportException e = openConnectionExpectFailure(qcService, node, connectionProfile);
             assertThat(
                 e.getRootCause().getMessage(),
-                anyOf(containsString("unable to find valid certification path"), containsString("Unable to find certificate chain"))
+                anyOf(
+                    containsString("unable to find valid certification path"),
+                    containsString("Unable to find certificate chain"),
+                    containsString("Unable to construct a valid chain")
+                )
             );
         }
diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
index 515ffca4a59bf..60b0b372ba14c 100644
--- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
+++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 
 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
index 4c98276abe154..17996ce82a453 100644
--- a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
+++ b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
@@ -6,7 +6,7 @@
  */
 
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
+
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 
 apply plugin: 'elasticsearch.internal-testclusters'
diff --git a/x-pack/plugin/slm/build.gradle b/x-pack/plugin/slm/build.gradle
index d9511fe67e8e0..b54e31315f709 100644
--- a/x-pack/plugin/slm/build.gradle
+++ b/x-pack/plugin/slm/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle
index d6b1fe8a1e219..afbae8932e292 100644
--- a/x-pack/plugin/slm/qa/multi-node/build.gradle
+++ b/x-pack/plugin/slm/qa/multi-node/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
index 192807d667abb..cc01d5b101106 100644
--- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java
@@ -130,7 +130,6 @@ public Collection createComponents(PluginServices services) {
         SnapshotLifecycleTemplateRegistry templateRegistry = new SnapshotLifecycleTemplateRegistry(
             settings,
             clusterService,
-            services.featureService(),
             threadPool,
             client,
             services.xContentRegistry()
diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java
index 96b962f70a1b6..274dec75865a8 100644
--- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java
@@ -7,12 +7,9 @@
 
 package org.elasticsearch.xpack.slm;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.features.FeatureSpecification;
 import org.elasticsearch.features.NodeFeature;
-import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry;
 
-import java.util.Map;
 import java.util.Set;
 
 public class SnapshotLifecycleFeatures implements FeatureSpecification {
@@ -20,9 +17,4 @@ public class SnapshotLifecycleFeatures implements FeatureSpecification {
     public Set getFeatures() {
         return Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE);
     }
-
-    @Override
-    public Map getHistoricalFeatures() {
-        return Map.of(SnapshotLifecycleTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0);
-    }
 }
diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java
index f40ea5a56463a..31c624df67813 100644
--- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java
@@ -8,13 +8,10 @@
 package org.elasticsearch.xpack.slm.history;
 
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.features.FeatureService;
-import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata;
@@ -47,13 +44,11 @@ public class SnapshotLifecycleTemplateRegistry extends IndexTemplateRegistry {
     // version 6: manage by data stream lifecycle
     // version 7: version the index template name so we can upgrade existing deployments
     public static final int INDEX_TEMPLATE_VERSION = 7;
-    public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("slm-history-managed-by-dsl");
 
     public static final String SLM_TEMPLATE_VERSION_VARIABLE = "xpack.slm.template.version";
     public static final String SLM_TEMPLATE_NAME = ".slm-history-" + INDEX_TEMPLATE_VERSION;
 
     public static final String SLM_POLICY_NAME = "slm-history-ilm-policy";
-    private final FeatureService featureService;
 
     @Override
     protected boolean requiresMasterNode() {
@@ -65,13 +60,11 @@ protected boolean requiresMasterNode() {
     public SnapshotLifecycleTemplateRegistry(
         Settings nodeSettings,
         ClusterService clusterService,
-        FeatureService featureService,
         ThreadPool threadPool,
         Client client,
         NamedXContentRegistry xContentRegistry
     ) {
         super(nodeSettings, clusterService, threadPool, client, xContentRegistry);
-        this.featureService = featureService;
         slmHistoryEnabled = SLM_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings);
     }
 
@@ -122,9 +115,4 @@ public boolean validate(ClusterState state) {
         boolean allPoliciesPresent = maybePolicies.map(policies -> policies.keySet().containsAll(policyNames)).orElse(false);
         return allTemplatesPresent && allPoliciesPresent;
     }
-
-    @Override
-    protected boolean isClusterReady(ClusterChangedEvent event) {
-        return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE);
-    }
 }
diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java
index d5a8faea1c0a0..8f25a4e70388e 100644
--- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java
+++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.TriFunction;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.client.NoOpClient;
@@ -48,7 +47,6 @@
 import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType;
 import org.elasticsearch.xpack.core.ilm.action.ILMActions;
 import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest;
-import org.elasticsearch.xpack.slm.SnapshotLifecycleFeatures;
 import org.junit.After;
 import org.junit.Before;
 
@@ -102,14 +100,7 @@ public void createRegistryAndClient() {
             )
         );
         xContentRegistry = new NamedXContentRegistry(entries);
-        registry = new SnapshotLifecycleTemplateRegistry(
-            Settings.EMPTY,
-            clusterService,
-            new FeatureService(List.of(new SnapshotLifecycleFeatures())),
-            threadPool,
-            client,
-            xContentRegistry
-        );
+        registry = new SnapshotLifecycleTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry);
     }
 
     @After
@@ -124,7 +115,6 @@ public void testDisabledDoesNotAddTemplates() {
         SnapshotLifecycleTemplateRegistry disabledRegistry = new SnapshotLifecycleTemplateRegistry(
             settings,
             clusterService,
-            new FeatureService(List.of(new SnapshotLifecycleFeatures())),
            threadPool,
            client,
            xContentRegistry
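The SLM hunks above delete the same kind of gate on the template-registry side: the isClusterReady override that held back template installation until every node supported the 8.12-era "slm-history-managed-by-dsl" feature. A hedged sketch of the pattern with stand-in types (BaseRegistry/ClusterStateView are not the real IndexTemplateRegistry API); once the gate can only be open, the override and its FeatureService constructor argument are dead weight:

final class RegistryReadinessSketch {
    interface ClusterStateView {
        boolean allNodesHave(String feature);
    }

    static class BaseRegistry {
        // Base-class default: install templates as soon as a state arrives.
        protected boolean isClusterReady(ClusterStateView state) {
            return true;
        }
    }

    // The deleted pattern: hold installation back until every node supports
    // the feature. On clusters where that is always true, this reduces to the
    // base-class default above.
    static class GatedRegistry extends BaseRegistry {
        @Override
        protected boolean isClusterReady(ClusterStateView state) {
            return state.allNodesHave("slm-history-managed-by-dsl");
        }
    }
}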
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle index 7550ab8585e13..4d39ca95312aa 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle index e676e1f1f2162..07909bf4cdbc1 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle index af4ed719a9c2f..5f195e983d191 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ - -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle index b7e1036ab3e26..176a441279aab 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 14e2b05bc140e..81eb82a522389 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle index 313a11f8ce431..33398d5b8064b 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle @@ -5,8 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index 4304bae5b9991..6299908f0dc14 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index d1dcbc3adbd95..69468bf574956 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -1,8 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' -import org.elasticsearch.gradle.internal.info.BuildParams - esplugin { name = 'x-pack-sql' description 'The Elasticsearch plugin that powers SQL for Elasticsearch' diff --git a/x-pack/plugin/sql/qa/jdbc/build.gradle b/x-pack/plugin/sql/qa/jdbc/build.gradle index a444399ed28ce..e93d3b72f1de9 100644 --- a/x-pack/plugin/sql/qa/jdbc/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/build.gradle @@ -1,7 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask description = 'Integration tests for SQL JDBC driver' diff --git a/x-pack/plugin/sql/qa/jdbc/security/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/build.gradle index c446755e91929..82510285cb996 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/build.gradle @@ -1,4 +1,8 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,7 +15,10 @@ dependencies { Project mainProject = project + subprojects { + def clusterPath = getPath() + // Use tests from the root security qa project in subprojects configurations.create('testArtifacts').transitive(false) @@ -46,6 +53,17 @@ subprojects { dependsOn copyTestClasses classpath += configurations.testArtifacts testClassesDirs = project.files(testArtifactsDir) + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("javaRestTest") + it.parameters.service = serviceProvider + } + nonInputProperties.systemProperty 'tests.audit.logfile', "${-> testClusters.javaRestTest.singleNode().getAuditLog()}" nonInputProperties.systemProperty 'tests.audit.yesterday.logfile', diff --git a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle index 971c7bf319244..1637cad33c76d 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.test-with-ssl' diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 06e3b61d5b303..35600fda0eb33 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -1,10 +1,16 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.bwc-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + dependencies { javaRestTestImplementation project(':x-pack:qa') javaRestTestImplementation(project(xpackModule('ql:test-fixtures'))) diff --git a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle index 51a3f83a909af..0b9c515c48be2 100644 --- a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.test-with-ssl' diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index cd24dcc15c863..bd8788191cfa2 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ /* * This project is named sql-cli because it is in the "org.elasticsearch.plugin" diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 411a4cda868f0..f9fed2b8f6a7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -360,11 +359,6 @@ static class ImplicitGroupActionListener extends BaseAggActionListener { private static final List<Bucket> EMPTY_BUCKET = singletonList(new Bucket() { - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new SqlIllegalArgumentException("No group-by/aggs defined"); - } - @Override public Object getKey() { throw new SqlIllegalArgumentException("No group-by/aggs defined"); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java index cb832cbd4b2d4..8f8f5917ae123 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java +++
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java @@ -8,9 +8,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; import java.util.Map; class TestBucket implements Bucket { @@ -25,11 +23,6 @@ class TestBucket implements Bucket { this.aggs = aggs; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public Map<String, Object> getKey() { return key; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 83234901ae8f2..35cfbac5e3439 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -170,4 +170,43 @@ setup: catch: /cannot sort on _source/ esql.query: body: - query: 'FROM test metadata _source | sort _source' + query: 'FROM test metadata _source | SORT _source' + +--- +"sort returning _source is allowed": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sort_returning_source_ok] + reason: "Sorts returning _source should be ok, but weren't in older versions" + - do: + esql.query: + body: + query: 'FROM test METADATA _source | SORT case ASC | KEEP case, _source | LIMIT 5' + - length: { columns: 2 } + - length: { values: 3 } + - match: {columns.0.name: "case"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "_source"} + - match: {columns.1.type: "_source"} + - match: {values.0.0: "all_ignored"} + - match: {values.0.1: { + "integer" : "not-an-integer", + "keyword" : "long-keyword", + "case" : "all_ignored" + }} + - match: {values.1.0: "integer_ignored"} + - match: {values.1.1: { + "integer" : "not-an-integer", + "keyword" : "ok", + "case" : "integer_ignored" + }} + - match: {values.2.0: "ok"} + - match: {values.2.1: { + "integer" : 10, + "keyword" : "ok", + "case" : "ok" + }} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 4c3b16c5dc309..72c7c51655378 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasible, so we just check that we return the correct count as an approximation.
- - length: {esql.functions: 120} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 121} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 117} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 118} # check the "sister" test above for a likely update to the same esql.functions length check diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 076bf116292d0..3f2bcb4ed7f4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -162,14 +162,38 @@ teardown: --- "Invalid IP strings": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "IP range ENRICH support was added in 8.14.0" + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Runtime range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] - do: - catch: /'invalid_[\d\.]+' is not an IP string literal/ + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:68|-1:-1): evaluation of \\[(ENRICH networks-policy ON ip_str|)\\] failed, treating result as null. Only first 20 failures recorded." + - "Line (1:68|-1:-1): java.lang.IllegalArgumentException: 'invalid_' is not an IP string literal." 
+ esql.query: body: - query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + query: 'FROM events | eval ip_str = mv_concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + + - match: { columns.0.name: "ip" } + - match: { columns.0.type: "ip" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "department" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "message" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "10.100.0.21", null, null, "network connected" ] } + - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], null, null, "sending messages" ] } + - match: { values.2: [ "10.101.0.107" , null, null, "network disconnected" ] } + - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } --- "IP": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml new file mode 100644 index 0000000000000..4d84a10507504 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml @@ -0,0 +1,199 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: ages + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + age_range: + type: "integer_range" + description: + type: "keyword" + + - do: + bulk: + index: ages + refresh: true + body: + - { "index": { } } + - { "age_range": { "gte": 0, "lt": 2 }, "description": "Baby" } + - { "index": { } } + - { "age_range": { "gte": 2, "lt": 4 }, "description": "Toddler" } + - { "index": { } } + - { "age_range": { "gte": 3, "lt": 5 }, "description": "Preschooler" } + - { "index": { } } + - { "age_range": { "gte": 5, "lt": 12 }, "description": "Child" } + - { "index": { } } + - { "age_range": { "gte": 13, "lt": 20 }, "description": "Adolescent" } + - { "index": { } } + - { "age_range": { "gte": 20, "lt": 40 }, "description": "Young Adult" } + - { "index": { } } + - { "age_range": { "gte": 40, "lt": 60 }, "description": "Middle-aged" } + - { "index": { } } + - { "age_range": { "gte": 60, "lt": 80 }, "description": "Senior" } + - { "index": { } } + - { "age_range": { "gte": 80, "lt": 100 }, "description": "Elderly" } + - { "index": { } } + - { "age_range": { "gte": 100, "lt": 200 }, "description": "Incredible" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: ages-policy + body: + range: + indices: [ "ages" ] + match_field: "age_range" + enrich_fields: [ "description" ] + + - do: + enrich.execute_policy: + name: ages-policy + + - do: + indices.create: + index: employees + body: + mappings: + properties: + name: + type: keyword + age: + type: integer + ak: + type: keyword + salary: + type: double + + - do: + bulk: + index: employees + refresh: true + body: + - { "index": { } } + - { "name": "Joe Soap", "age": 36, "ak": "36", "salary": 55.55 } 
+ - { "index": { } } + - { "name": "Jane Doe", "age": 31, "ak": "31", "salary": 55.55 } + - { "index": { } } + - { "name": "Jane Immortal", "age": -1, "ak": "immortal", "salary": 55.55 } + - { "index": { } } + - { "name": "Magic Mike", "age": 44, "ak": "44", "salary": 55.55 } + - { "index": { } } + - { "name": "Anon Ymous", "age": 61, "ak": "61", "salary": 55.55 } + +--- +teardown: + - do: + enrich.delete_policy: + name: ages-policy + +--- +"ages": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON age | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line 1:29: evaluation of \\[ak::integer\\] failed, treating result as null. Only first 20 failures recorded." + - "Line 1:29: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number \\[immortal\\]" + esql.query: + body: + query: 'FROM employees | EVAL aki = ak::integer | ENRICH ages-policy ON aki | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON ak|)\\] failed, treating result as null. Only first 20 failures recorded." + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"immortal\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON ak | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"Invalid age as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON name|)\\] failed, treating result as null. Only first 20 failures recorded." 
+ - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Joe Soap\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Doe\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Immortal\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Magic Mike\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Anon Ymous\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON name | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 5, null ] } + +--- +"Invalid age as double": + - do: + catch: /ENRICH range and input types are incompatible. range\[INTEGER\], input\[DOUBLE\]/ + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON salary | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml new file mode 100644 index 0000000000000..ef11e5098f5c2 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml @@ -0,0 +1,222 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: decades + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + decade: + type: "integer" + description: + type: "keyword" + + - do: + bulk: + index: decades + refresh: true + body: + - { "index": { } } + - { "decade": 1900, "description": "Gay Nineties" } + - { "index": { } } + - { "decade": 1910, "description": "Teens" } + - { "index": { } } + - { "decade": 1920, "description": "Roaring Twenties" } + - { "index": { } } + - { "decade": 1930, "description": "Dirty Thirties" } + - { "index": { } } + - { "decade": 1940, "description": "War Years" } + - { "index": { } } + - { "decade": 1950, "description": "Fabulous Fifties" } + - { "index": { } } + - { "decade": 1960, "description": "Swinging Sixties" } + - { "index": { } } + - { "decade": 1970, "description": "Me Decade" } + - { "index": { } } + - { "decade": 1980, "description": "Decade of Greed" } + - { "index": { } } + - { "decade": 1990, "description": "Nineties" } + - { "index": { } } + - { "decade": 2000, "description": "Aughts" } + - { "index": { } } + - { "decade": 2010, "description": "Digital Age" } + - { "index": { } } + - { "decade": 2020, "description": "Roaring Twenties 2.0" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: decades-policy + body: + match: + indices: [ "decades" ] + match_field: "decade" + enrich_fields: [ "description" ] + + - do: + enrich.execute_policy: + name: decades-policy + + - do: + indices.create: + index: songs + body: + mappings: + properties: + title: + type: keyword + year: + type: integer + singer: + type: keyword + + - do: + bulk: + index: songs 
+ refresh: true + body: + - { "index": { } } + - { "singer": "Louis Armstrong", "title": "What a Wonderful World", "year": 1967 } + - { "index": { } } + - { "singer": "The Beatles", "title": "Hey Jude", "year": 1968 } + - { "index": { } } + - { "singer": "Elvis Presley", "title": "Jailhouse Rock", "year": 1957 } + - { "index": { } } + - { "singer": "Billie Holiday", "title": "Strange Fruit", "year": 1939 } + - { "index": { } } + - { "singer": "Frank Sinatra", "title": "Fly Me to the Moon", "year": 1964 } + - { "index": { } } + - { "singer": "Bob Dylan", "title": "Blowin' in the Wind", "year": 1963 } + - { "index": { } } + - { "singer": "Queen", "title": "Bohemian Rhapsody", "year": 1975 } + - { "index": { } } + - { "singer": "ABBA", "title": "Dancing Queen", "year": 1976 } + - { "index": { } } + - { "singer": "Michael Jackson", "title": "Thriller", "year": 1982 } + - { "index": { } } + - { "singer": "Nirvana", "title": "Smells Like Teen Spirit", "year": 1991 } + - { "index": { } } + - { "singer": "Whitney Houston", "title": "I Will Always Love You", "year": 1992 } + - { "index": { } } + - { "singer": "Aretha Franklin", "title": "Respect", "year": 1967 } + - { "index": { } } + - { "singer": "Chuck Berry", "title": "Johnny B. Goode", "year": 1958 } + - { "index": { } } + - { "singer": "Madonna", "title": "Like a Prayer", "year": 1989 } + - { "index": { } } + - { "singer": "The Rolling Stones", "title": "(I Can't Get No) Satisfaction", "year": 1965 } + - { "index": { } } + - { "singer": "Beyoncé", "title": "Single Ladies (Put a Ring on It)", "year": 2008 } + - { "index": { } } + - { "singer": "Adele", "title": "Rolling in the Deep", "year": 2010 } + - { "index": { } } + - { "singer": "Lady Gaga", "title": "Bad Romance", "year": 2009 } + - { "index": { } } + - { "singer": "Billie Eilish", "title": "Bad Guy", "year": 2019 } + - { "index": { } } + - { "singer": "Taylor Swift", "title": "Anti-Hero", "year": 2022 } + +--- +teardown: + - do: + enrich.delete_policy: + name: decades-policy + +--- +"decades": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::integer | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } + - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"decades as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::keyword | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } 
+ - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"Invalid decade as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | ENRICH decades-policy ON singer | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] } + +--- +"Invalid decade as double": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = 10.0*FLOOR(year/10) | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index b2dc04c1178e4..c89a8237d40b7 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -10,12 +10,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -35,7 +33,6 @@ import java.util.Map; import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_ENABLED; -import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_FEATURE; @Deprecated(since = "8.12.0", forRemoval = true) public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { @@ -48,7 +45,6 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; private final ClusterService clusterService; - private final FeatureService featureService; private volatile boolean stackTemplateEnabled; private static final Map<String, String> ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "true"); @@ -95,12 +91,10 @@ public LegacyStackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings,
clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); } @@ -282,12 +276,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only once all nodes are updated to 8.9.0. - // This is necessary to prevent an error caused by the usage of the ignore_missing_pipeline property - // in the pipeline processor, which has been introduced only in 8.9.0 - return featureService.clusterHasFeature(event.state(), STACK_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java index 71d01798323d3..73c18a3cc2619 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java @@ -33,8 +33,7 @@ public Collection<?> createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); legacyStackTemplateRegistry.initialize(); StackTemplateRegistry stackTemplateRegistry = new StackTemplateRegistry( @@ -42,8 +41,7 @@ public Collection<?> createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); stackTemplateRegistry.initialize(); return List.of(legacyStackTemplateRegistry, stackTemplateRegistry); diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index ce1b664a46887..aeb9bf2bfa5cb 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -10,14 +10,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -38,13 +35,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); - // Historical node feature kept here as LegacyStackTemplateRegistry is deprecated - public static final NodeFeature STACK_TEMPLATES_FEATURE = new NodeFeature("stack.templates_supported"); - - // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE}
and it's meant to avoid adding a - dependency to the data-streams module just for this - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); - // The stack template registry version. This number must be incremented when we make changes // to built-in templates. public static final int REGISTRY_VERSION = 14; @@ -58,7 +48,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { ); private final ClusterService clusterService; - private final FeatureService featureService; private final Map<String, ComponentTemplate> componentTemplateConfigs; private volatile boolean stackTemplateEnabled; @@ -121,12 +110,10 @@ public StackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); this.componentTemplateConfigs = loadComponentTemplateConfigs(); } @@ -355,11 +342,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to .kibana-reporting making use of the feature - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java deleted file mode 100644 index 7b05231fcfd15..0000000000000 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.stack; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class StackTemplatesFeatures implements FeatureSpecification { - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(StackTemplateRegistry.STACK_TEMPLATES_FEATURE, Version.V_8_9_0); - } -} diff --git a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 30a1498a54725..0000000000000 --- a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0.
-# - -org.elasticsearch.xpack.stack.StackTemplatesFeatures diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java index 39f58e638aa68..654cf494e0e6f 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -25,8 +24,6 @@ import org.junit.After; import org.junit.Before; -import java.util.List; - public class LegacyStackTemplateRegistryTests extends ESTestCase { private LegacyStackTemplateRegistry registry; private ThreadPool threadPool; @@ -36,15 +33,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); Client client = new NoOpClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - var featureService = new FeatureService(List.of(new StackTemplatesFeatures())); - registry = new LegacyStackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new LegacyStackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -67,7 +56,7 @@ public void testThatTemplatesAreDeprecated() { registry.getIngestPipelines() .stream() .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON)) - .map(PipelineConfiguration::getConfigAsMap) + .map(PipelineConfiguration::getConfig) .forEach(p -> assertTrue((Boolean) p.get("deprecated"))); } diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java index c1c855867599a..7f674e24658dd 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; @@ -24,10 +23,9 @@ class StackRegistryWithNonRequiredTemplates extends StackTemplateRegistry { ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git 
a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 25ff3b5311fa2..a8043f3d5e4e5 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.stack; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -29,8 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -71,7 +68,6 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -82,22 +78,13 @@ public class StackTemplateRegistryTests extends ESTestCase { private ClusterService clusterService; private ThreadPool threadPool; private VerifyingClient client; - private FeatureService featureService; @Before public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); clusterService = ClusterServiceUtils.createClusterService(threadPool); - featureService = new FeatureService(List.of(new StackTemplatesFeatures(), new DataStreamFeatures())); - registry = new StackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -114,8 +101,7 @@ public void testDisabledDoesNotAddIndexTemplates() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComposableTemplateConfigs(), anEmptyMap()); } @@ -127,8 +113,7 @@ public void testDisabledStillAddsComponentTemplatesAndIlmPolicies() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComponentTemplateConfigs(), not(anEmptyMap())); assertThat( @@ -371,8 +356,7 @@ public void testMissingNonRequiredTemplates() throws Exception { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); DiscoveryNode node = DiscoveryNodeUtils.create("node"); @@ -519,25 +503,6 @@ public void testThatMissingMasterNodeDoesNothing() { registry.clusterChanged(event); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - 
.masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster nodes are not updated to at least v.8.11.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - public void testThatTemplatesAreNotDeprecated() { for (ComposableIndexTemplate it : registry.getComposableTemplateConfigs().values()) { assertFalse(it.isDeprecated()); @@ -551,15 +516,10 @@ public void testThatTemplatesAreNotDeprecated() { registry.getIngestPipelines() .stream() .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON)) - .map(PipelineConfiguration::getConfigAsMap) + .map(PipelineConfiguration::getConfig) .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } - public void testDataStreamLifecycleNodeFeatureId() { - // let's make sure these ids remain in-sync - assertThat(StackTemplateRegistry.DATA_STREAM_LIFECYCLE.id(), is(DataStreamFeatures.DATA_STREAM_LIFECYCLE.id())); - } - // ------------- /** diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index eb0551a4d10e1..b4ee0bee76d9d 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 681ec38e9a57a..7359071996cc8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -918,14 +918,14 @@ public void testRangeAggExtractor() { Aggregation agg = createRangeAgg( "p_agg", List.of( - new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, 0, 0,
InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, false, DocValueFormat.RAW) + new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, DocValueFormat.RAW) ) ); assertThat( diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle index 8382a71092720..2d5fc8349b5e0 100644 --- a/x-pack/plugin/watcher/qa/rest/build.gradle +++ b/x-pack/plugin/watcher/qa/rest/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 1691a464d8061..99fb626ad9474 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -16,11 +16,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -96,18 +98,18 @@ public static void main(String[] args) throws Exception { ); System.out.println("and heap_max=" + JvmInfo.jvmInfo().getMem().getHeapMax()); + Environment internalNodeEnv = InternalSettingsPreparer.prepareEnvironment( + Settings.builder().put(SETTINGS).put("node.data", false).build(), + emptyMap(), + null, + () -> { + throw new IllegalArgumentException("settings must have [node.name]"); + } + ); + // First clean everything and index the watcher (but not via put alert api!) 
try ( - Node node = new Node( - InternalSettingsPreparer.prepareEnvironment( - Settings.builder().put(SETTINGS).put("node.data", false).build(), - emptyMap(), - null, - () -> { - throw new IllegalArgumentException("settings must have [node.name]"); - } - ) - ).start() + Node node = new Node(internalNodeEnv, new PluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())).start() ) { final Client client = node.client(); ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get(); diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle index b582f3fcea903..1a4f133402582 100644 --- a/x-pack/plugin/wildcard/build.gradle +++ b/x-pack/plugin/wildcard/build.gradle @@ -1,7 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { name 'wildcard' @@ -19,9 +24,3 @@ dependencies { compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java b/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java index 61eb0c8b0de3e..c9ec7d71b1805 100644 --- a/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java +++ b/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class WildcardClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public WildcardClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testC public static Iterable<Object[]> parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("wildcard").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 8a67a2c1dde0d..65f2282014dc4 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-yaml-rest-test' -import org.elasticsearch.gradle.internal.info.BuildParams dependencies { testImplementation project(':x-pack:qa') diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index d6b05242f613b..ee0955c6db082 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index a56ddaabe8280..fe4c1c20c69c4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -23,13 +23,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -53,8 +50,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.upgrades.FullClusterRestartIT.assertNumHits; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -295,10 +290,6 @@ public void testWatcherWithApiKey() throws Exception { } public void testServiceAccountApiKey() throws IOException { - @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) - var originalClusterSupportsServiceAccounts = oldClusterHasFeature(RestTestLegacyFeatures.SERVICE_ACCOUNTS_SUPPORTED); - assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); - if (isRunningAgainstOldCluster()) { final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); @@ -484,10 +475,6 @@ public void testRollupAfterRestart() throws Exception { } public void testTransformLegacyTemplateCleanup() throws Exception { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - var 
originalClusterSupportsTransform = oldClusterHasFeature(RestTestLegacyFeatures.TRANSFORM_SUPPORTED); - assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform); - if (isRunningAgainstOldCluster()) { // create the source index @@ -562,9 +549,6 @@ public void testTransformLegacyTemplateCleanup() throws Exception { } public void testSlmPolicyAndStats() throws IOException { - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterSupportsSlm = oldClusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED); - SnapshotLifecyclePolicy slmPolicy = new SnapshotLifecyclePolicy( "test-policy", "test-policy", @@ -573,7 +557,7 @@ public void testSlmPolicyAndStats() throws IOException { Collections.singletonMap("indices", Collections.singletonList("*")), null ); - if (isRunningAgainstOldCluster() && originalClusterSupportsSlm) { + if (isRunningAgainstOldCluster()) { Request createRepoRequest = new Request("PUT", "_snapshot/test-repo"); String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}"; createRepoRequest.setJsonEntity(repoCreateJson); @@ -587,7 +571,7 @@ public void testSlmPolicyAndStats() throws IOException { client().performRequest(createSlmPolicyRequest); } - if (isRunningAgainstOldCluster() == false && originalClusterSupportsSlm) { + if (isRunningAgainstOldCluster() == false) { Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy"); Response response = client().performRequest(getSlmPolicyRequest); Map<String, Object> responseMap = entityAsMap(response); @@ -914,14 +898,6 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS @SuppressWarnings("unchecked") public void testDataStreams() throws Exception { - - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterSupportsDataStreams = oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED); - - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterDataStreamHasDateInIndexName = oldClusterHasFeature(RestTestLegacyFeatures.NEW_DATA_STREAMS_INDEX_NAME_FORMAT); - - assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams); if (isRunningAgainstOldCluster()) { createComposableTemplate(client(), "dst", "ds"); @@ -958,89 +934,10 @@ public void testDataStreams() throws Exception { List<Map<String, Object>> indices = (List<Map<String, Object>>) ds.get("indices"); assertEquals("ds", ds.get("name")); assertEquals(1, indices.size()); - assertEquals( - DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, originalClusterDataStreamHasDateInIndexName), - indices.get(0).get("index_name") - ); + assertEquals(DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp), indices.get(0).get("index_name")); assertNumHits("ds", 1, 1); } - /** - * Tests that a single document survives. Super basic smoke test.
- */ - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // Can be removed - public void testDisableFieldNameField() throws IOException { - assumeFalse( - "can only disable field names field before 8.0", - oldClusterHasFeature(RestTestLegacyFeatures.DISABLE_FIELD_NAMES_FIELD_REMOVED) - ); - - String docLocation = "/nofnf/_doc/1"; - String doc = """ - { - "dv": "test", - "no_dv": "test" - }"""; - - if (isRunningAgainstOldCluster()) { - Request createIndex = new Request("PUT", "/nofnf"); - createIndex.setJsonEntity(""" - { - "settings": { - "index": { - "number_of_replicas": 1 - } - }, - "mappings": { - "_field_names": { "enabled": false }, - "properties": { - "dv": { "type": "keyword" }, - "no_dv": { "type": "keyword", "doc_values": false } - } - } - }"""); - createIndex.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(fieldNamesFieldOk())); - client().performRequest(createIndex); - - Request createDoc = new Request("PUT", docLocation); - createDoc.addParameter("refresh", "true"); - createDoc.setJsonEntity(doc); - client().performRequest(createDoc); - } - - Request getRequest = new Request("GET", docLocation); - assertThat(toStr(client().performRequest(getRequest)), containsString(doc)); - - if (isRunningAgainstOldCluster() == false) { - Request esql = new Request("POST", "_query"); - esql.setJsonEntity(""" - { - "query": "FROM nofnf | LIMIT 1" - }"""); - // {"columns":[{"name":"dv","type":"keyword"},{"name":"no_dv","type":"keyword"}],"values":[["test",null]]} - try { - Map<String, Object> result = entityAsMap(client().performRequest(esql)); - MapMatcher mapMatcher = matchesMap(); - if (result.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) result.get("took")).intValue()); - } - assertMap( - result, - mapMatcher.entry( - "columns", - List.of(Map.of("name", "dv", "type", "keyword"), Map.of("name", "no_dv", "type", "keyword")) - ).entry("values", List.of(List.of("test", "test"))) - ); - } catch (ResponseException e) { - logger.error( - "failed to query index without field name field. Existing indices:\n{}", - EntityUtils.toString(client().performRequest(new Request("GET", "_cat/indices")).getEntity()) - ); - throw e; - } - } - } - /** * Ignore the warning about the {@code _field_names} field. We intentionally * turn that field off sometimes.
And other times old versions spuriously diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index c825de31a7f6e..91820299da8a5 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; @@ -48,11 +47,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java index 7dc0a2f48bbc9..a83ad5b4f8da4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -64,11 +63,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 0b15e98f201a0..74f62fac26488 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ 
b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -58,11 +57,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/mixed-tier-cluster/build.gradle b/x-pack/qa/mixed-tier-cluster/build.gradle index 79e7d6a655993..bee28c47dc867 100644 --- a/x-pack/qa/mixed-tier-cluster/build.gradle +++ b/x-pack/qa/mixed-tier-cluster/build.gradle @@ -1,10 +1,16 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.bwc-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + dependencies { javaRestTestImplementation project(':x-pack:qa') } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle index 9c0648abca21b..83c231da7529c 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle index ca79bb7ec3825..6e95d718b19de 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle index b9f8369763476..5c6235e092458 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index b53539b224861..43d1cd12cdfb7 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 78cfc0f688e4a..ecd02ac9d209f 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.transform.UnzipTransform diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 09b3b7db7c917..9a447f35eb13c 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle index 0d1cfbd5ff022..ebcb4cd9760fe 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle +++ b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 60fb55e9a2593..2049ccb5d9cc8 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 8a775c7f7d3d8..25e8aed73bda2 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.transport.RemoteClusterPortSettings; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -118,20 +117,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { ); RestClient client = client(); - if (isUpdateApiSupported(client)) { - var updateException = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) - ); + var updateException = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) + ); - assertThat( - updateException.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } + assertThat( + updateException.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); } case MIXED -> { try { @@ -145,20 +142,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { // fail when remote_indices are provided: // against old node - if (isUpdateApiSupported(oldVersionClient)) { - Exception e = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) - ); - assertThat( - e.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. 
unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } - Exception e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); + Exception e = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) + ); + assertThat( + e.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); + e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); assertThat( e.getMessage(), anyOf( @@ -263,28 +258,9 @@ private void updateOrBulkUpdateApiKey(String id, String roles) throws IOExceptio updateOrBulkUpdateApiKey(client(), id, roles); } - private boolean isUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY); // Update API was introduced in 8.4.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - - private boolean isBulkUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY); // Bulk update API was introduced in 8.5.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - private void updateOrBulkUpdateApiKey(RestClient client, String id, String roles) throws IOException { - if (false == isUpdateApiSupported(client)) { - return; // Update API is not supported. 
-        }
         final Request updateApiKeyRequest;
-        final boolean bulkUpdate = randomBoolean() && isBulkUpdateApiSupported(client);
+        final boolean bulkUpdate = randomBoolean();
         if (bulkUpdate) {
             updateApiKeyRequest = new Request("POST", "_security/api_key/_bulk_update");
             updateApiKeyRequest.setJsonEntity(org.elasticsearch.common.Strings.format("""
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java
index 74165eeb07b8a..aa166311f6465 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.XContentType;
 
 import java.io.IOException;
@@ -83,11 +82,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception {
 
                 // assert correct memory format is used
                 assertOldMemoryFormat("old_memory_format");
-                if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) {
-                    assertNewMemoryFormat("new_memory_format");
-                } else {
-                    assertOldMemoryFormat("new_memory_format");
-                }
+                assertNewMemoryFormat("new_memory_format");
             }
             case MIXED -> {
                 ensureHealth(".ml-inference-*,.ml-config*", (request -> {
@@ -99,12 +94,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception {
 
                 // assert correct memory format is used
                 assertOldMemoryFormat("old_memory_format");
-                if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) {
-                    assertNewMemoryFormat("new_memory_format");
-                } else {
-                    assertOldMemoryFormat("new_memory_format");
-                }
-
+                assertNewMemoryFormat("new_memory_format");
             }
             case UPGRADED -> {
                 ensureHealth(".ml-inference-*,.ml-config*", (request -> {
@@ -137,14 +127,12 @@ private void waitForDeploymentStarted(String modelId) throws Exception {
 
     @SuppressWarnings("unchecked")
     private void assertOldMemoryFormat(String modelId) throws Exception {
-        // There was a change in the MEMORY_OVERHEAD value in 8.3.0, see #86416
-        long memoryOverheadMb = clusterHasFeature(RestTestLegacyFeatures.ML_MEMORY_OVERHEAD_FIXED) ? 240 : 270;
         var response = getTrainedModelStats(modelId);
         Map<String, Object> map = entityAsMap(response);
         List<Map<String, Object>> stats = (List<Map<String, Object>>) map.get("trained_model_stats");
         assertThat(stats, hasSize(1));
         var stat = stats.get(0);
-        Long expectedMemoryUsage = ByteSizeValue.ofMb(memoryOverheadMb).getBytes() + RAW_MODEL_SIZE * 2;
+        Long expectedMemoryUsage = ByteSizeValue.ofMb(240).getBytes() + RAW_MODEL_SIZE * 2;
         Integer actualMemoryUsage = (Integer) XContentMapValues.extractValue("model_size_stats.required_native_memory_bytes", stat);
         assertThat(
             Strings.format("Memory usage mismatch for the model %s in cluster state %s", modelId, CLUSTER_TYPE.toString()),
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java
deleted file mode 100644
index e864a579bd0b0..0000000000000
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.upgrades;
-
-import org.elasticsearch.Build;
-import org.elasticsearch.TransportVersion;
-import org.elasticsearch.client.Request;
-import org.elasticsearch.common.util.Maps;
-import org.elasticsearch.test.rest.ObjectPath;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
-
-import java.util.Map;
-
-import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION;
-import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.everyItem;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.oneOf;
-
-public class TransportVersionClusterStateUpgradeIT extends AbstractUpgradeTestCase {
-
-    public void testReadsInferredTransportVersions() throws Exception {
-        // waitUntil because the versions fixup on upgrade happens in the background so may need a retry
-        assertTrue(waitUntil(() -> {
-            try {
-                // check several responses in order to sample from a selection of nodes
-                for (int i = getClusterHosts().size(); i > 0; i--) {
-                    if (runTransportVersionsTest() == false) {
-                        return false;
-                    }
-                }
-                return true;
-            } catch (Exception e) {
-                throw new AssertionError(e);
-            }
-        }));
-    }
-
-    private boolean runTransportVersionsTest() throws Exception {
-        final var clusterState = ObjectPath.createFromResponse(
-            client().performRequest(new Request("GET", "/_cluster/state" + randomFrom("", "/nodes") + randomFrom("", "?local")))
-        );
-        final var description = clusterState.toString();
-
-        final var nodeIds = clusterState.evaluateMapKeys("nodes");
-        final Map<String, String> versionsByNodeId = Maps.newHashMapWithExpectedSize(nodeIds.size());
-        for (final var nodeId : nodeIds) {
-            versionsByNodeId.put(nodeId, clusterState.evaluate("nodes." + nodeId + ".version"));
-        }
-
-        final var hasTransportVersions = clusterState.evaluate("transport_versions") != null;
-        final var hasNodesVersions = clusterState.evaluate("nodes_versions") != null;
-        assertFalse(description, hasNodesVersions && hasTransportVersions);
-
-        switch (CLUSTER_TYPE) {
-            case OLD -> {
-                if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) {
-                    // Before 8.8.0 there was only DiscoveryNode#version
-                    assertFalse(description, hasTransportVersions);
-                    assertFalse(description, hasNodesVersions);
-                } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) {
-                    // In [8.8.0, 8.11.0) we exposed just transport_versions
-                    assertTrue(description, hasTransportVersions);
-                    assertFalse(description, hasNodesVersions);
-                } else {
-                    // From 8.11.0 onwards we exposed nodes_versions
-                    assertFalse(description, hasTransportVersions);
-                    assertTrue(description, hasNodesVersions);
-                }
-            }
-            case MIXED -> {
-                if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) {
-                    // Responding node might be <8.8.0 (so no extra versions) or >=8.11.0 (includes nodes_versions)
-                    assertFalse(description, hasTransportVersions);
-                } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) {
-                    // Responding node might be in [8.8.0, 8.11.0) (transport_versions) or >=8.11.0 (includes nodes_versions) but not both
-                    assertTrue(description, hasNodesVersions || hasTransportVersions);
-                } else {
-                    // Responding node is ≥8.11.0 so has nodes_versions for sure
-                    assertFalse(description, hasTransportVersions);
-                    assertTrue(description, hasNodesVersions);
-                }
-            }
-            case UPGRADED -> {
-                // All nodes are Version.CURRENT, ≥8.11.0, so we definitely have nodes_versions
-                assertFalse(description, hasTransportVersions);
-                assertTrue(description, hasNodesVersions);
-                assertThat(description, versionsByNodeId.values(), everyItem(equalTo(Build.current().version())));
-            }
-        }
-
-        if (hasTransportVersions) {
-            // Upgrading from [8.8.0, 8.11.0) and the responding node is still on the old version
-            assertFalse(description, clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION));
-            assertTrue(description, clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED));
-            assertNotEquals(description, ClusterType.UPGRADED, CLUSTER_TYPE);
-
-            // transport_versions includes the correct version for all nodes, no inference is needed
-            assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("transport_versions"));
-            for (int i = 0; i < nodeIds.size(); i++) {
-                final var path = "transport_versions." + i;
-                final String nodeId = clusterState.evaluate(path + ".node_id");
-                final var nodeDescription = nodeId + "/" + description;
-                final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version"));
-                final var nodeVersion = versionsByNodeId.get(nodeId);
-                assertNotNull(nodeDescription, nodeVersion);
-                if (nodeVersion.equals(Build.current().version())) {
-                    assertEquals(nodeDescription, TransportVersion.current(), transportVersion);
-                } else {
-                    // There's no relationship between node versions and transport versions anymore, although we can be sure of this:
-                    assertThat(nodeDescription, transportVersion, greaterThanOrEqualTo(INFERRED_TRANSPORT_VERSION));
-                }
-            }
-        } else if (hasNodesVersions) {
-            // Either upgrading from ≥8.11.0 (the responding node might be old or new), or from <8.8.0 (the responding node is new)
-            assertFalse(
-                description,
-                clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false
-                    && CLUSTER_TYPE == ClusterType.OLD
-            );
-
-            // nodes_versions includes _a_ version for all nodes; it might be correct, or it might be inferred if we're upgrading from
-            // <8.8.0 and the master is still an old node or the TransportVersionsFixupListener hasn't run yet
-            assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("nodes_versions"));
-            for (int i = 0; i < nodeIds.size(); i++) {
-                final var path = "nodes_versions." + i;
-                final String nodeId = clusterState.evaluate(path + ".node_id");
-                final var nodeDescription = nodeId + "/" + description;
-                final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version"));
-                final var nodeVersion = versionsByNodeId.get(nodeId);
-                assertNotNull(nodeDescription, nodeVersion);
-                if (nodeVersion.equals(Build.current().version())) {
-                    // Either the responding node is upgraded or the upgrade is trivial; if the responding node is upgraded but the master
-                    // is not then its transport version may be temporarily inferred as 8.8.0 until TransportVersionsFixupListener runs.
-                    assertThat(
-                        nodeDescription,
-                        transportVersion,
-                        clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED)
-                            ? equalTo(TransportVersion.current())
-                            : oneOf(TransportVersion.current(), INFERRED_TRANSPORT_VERSION)
-                    );
-                    if (CLUSTER_TYPE == ClusterType.UPGRADED && transportVersion.equals(INFERRED_TRANSPORT_VERSION)) {
-                        // TransportVersionsFixupListener should run soon, retry
-                        logger.info("{} - not fixed up yet, retrying", nodeDescription);
-                        return false;
-                    }
-                } else {
-                    var version = parseLegacyVersion(nodeVersion);
-                    // All non-semantic versions are after 8.8.0 and have transport version
-                    var transportVersionIntroduced = version.map(v -> v.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)).orElse(true);
-                    if (transportVersionIntroduced) {
-                        // There's no relationship between node versions and transport versions anymore, although we can be sure of this:
-                        assertThat(nodeDescription, transportVersion, greaterThan(INFERRED_TRANSPORT_VERSION));
-                    } else {
-                        // Responding node is not upgraded, and no later than 8.8.0, so we infer its version correctly.
-                        assertEquals(nodeDescription, TransportVersion.fromId(version.get().id()), transportVersion);
-                    }
-                }
-            }
-        }
-
-        return true;
-    }
-}
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
index d7d2676163851..0c57baad1a09b 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
@@ -27,7 +27,6 @@
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import static org.elasticsearch.test.rest.RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 
@@ -43,12 +42,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa
     public void waitForTemplates() throws Exception {
         if (AbstractUpgradeTestCase.CLUSTER_TYPE == AbstractUpgradeTestCase.ClusterType.OLD) {
             try {
-                boolean clusterUnderstandsComposableTemplates = clusterHasFeature(COMPONENT_TEMPLATE_SUPPORTED);
-                XPackRestTestHelper.waitForTemplates(
-                    client(),
-                    XPackRestTestConstants.ML_POST_V7120_TEMPLATES,
-                    clusterUnderstandsComposableTemplates
-                );
+                XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES);
             } catch (AssertionError e) {
                 throw new AssertionError("Failure in test setup: Failed to initialize ML index templates", e);
             }
diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
index da2d095c001d4..461ebc4beb443 100644
--- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle
+++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
@@ -1,6 +1,11 @@
-import org.apache.tools.ant.filters.ReplaceTokens
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
-import org.elasticsearch.gradle.internal.info.BuildParams
+import org.apache.tools.ant.filters.ReplaceTokens
 
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 
diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle
index 427aa39f02e49..a51a67dd75b8a 100644
--- a/x-pack/qa/smoke-test-plugins/build.gradle
+++ b/x-pack/qa/smoke-test-plugins/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.apache.tools.ant.filters.ReplaceTokens
-import org.elasticsearch.gradle.internal.info.BuildParams
 
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java
index 31b74b8706877..88b01defb1a86 100644
--- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java
+++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.xcontent.json.JsonXContent;
 
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -34,32 +33,27 @@ private XPackRestTestHelper() {}
      * @throws InterruptedException If the wait is interrupted
      */
     @SuppressWarnings("unchecked")
-    public static void waitForTemplates(RestClient client, List<String> expectedTemplates, boolean clusterUnderstandsComposableTemplates)
-        throws Exception {
+    public static void waitForTemplates(RestClient client, List<String> expectedTemplates) throws Exception {
         // TODO: legacy support can be removed once all X-Pack plugins use only composable
         // templates in the oldest version we test upgrades from
         assertBusy(() -> {
             Map<String, Object> response;
-            if (clusterUnderstandsComposableTemplates) {
-                final Request request = new Request("GET", "_index_template");
-                request.addParameter("error_trace", "true");
+            final Request request = new Request("GET", "_index_template");
+            request.addParameter("error_trace", "true");
 
-                String string = EntityUtils.toString(client.performRequest(request).getEntity());
-                List<Map<String, Object>> templateList = (List<Map<String, Object>>) XContentHelper.convertToMap(
-                    JsonXContent.jsonXContent,
-                    string,
-                    false
-                ).get("index_templates");
-                response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template")));
-            } else {
-                response = Collections.emptyMap();
-            }
+            String string = EntityUtils.toString(client.performRequest(request).getEntity());
+            List<Map<String, Object>> templateList = (List<Map<String, Object>>) XContentHelper.convertToMap(
+                JsonXContent.jsonXContent,
+                string,
+                false
+            ).get("index_templates");
+            response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template")));
 
             final Set<String> templates = new TreeSet<>(response.keySet());
 
             final Request legacyRequest = new Request("GET", "_template");
             legacyRequest.addParameter("error_trace", "true");
-            String string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity());
+            string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity());
             Map<String, Object> legacyResponse = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false);
             final Set<String> legacyTemplates = new TreeSet<>(legacyResponse.keySet());
diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle
index 626693a8f295f..7e3d0485545a6 100644
--- a/x-pack/qa/third-party/jira/build.gradle
+++ b/x-pack/qa/third-party/jira/build.gradle
@@ -1,10 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import groovy.json.JsonSlurper
 
 import javax.net.ssl.HttpsURLConnection
 import java.nio.charset.StandardCharsets
 
-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 
 dependencies {
diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle
index 86ed67ccbb2d6..20f7b9b654b66 100644
--- a/x-pack/qa/third-party/pagerduty/build.gradle
+++ b/x-pack/qa/third-party/pagerduty/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 
diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle
index ff501a7c99c9b..54821a9d2b71a 100644
--- a/x-pack/qa/third-party/slack/build.gradle
+++ b/x-pack/qa/third-party/slack/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'