diff --git a/.ci/bwcVersions b/.ci/bwcVersions index a8be643665224..3f3200264bad8 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -51,5 +51,6 @@ BWC_VERSION: - "7.17.0" - "7.17.1" - "8.0.0" + - "8.0.1" - "8.1.0" - "8.2.0" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml similarity index 69% rename from .ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml rename to .ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml index ab5f17d60b933..70509792a9d8c 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml @@ -1,7 +1,9 @@ --- -jjbb-template: generic-gradle-unix.yml +jjbb-template: matrix-gradle-unix.yml vars: - - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc + - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc-snapshots - job-display-name: "elastic / elasticsearch # %BRANCH% - intake bwc" - job-description: Elasticsearch %BRANCH% branch intake backwards compatibility checks. - - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files bwcTestSnapshots" + - matrix-yaml-file: ".ci/snapshotBwcVersions" + - matrix-variable: BWC_VERSION + - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$BWC_VERSION#bwcTest" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake.yml b/.ci/jobs.t/elastic+elasticsearch+intake.yml index 6da6161c56763..645408771b70a 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake.yml @@ -42,7 +42,7 @@ kill-phase-on: NEVER current-parameters: true git-revision: true - - name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc + - name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc-snapshots kill-phase-on: NEVER current-parameters: true git-revision: true diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml index 2753cae719faa..a45989ae00fbf 100644 --- a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml +++ b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml @@ -14,6 +14,7 @@ type: label-expression name: os values: + - "almalinux-8-aarch64&&immutable" - "ubuntu-1804-aarch64&&immutable" builders: - inject: diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml similarity index 74% rename from .ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml rename to .ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml index 606d906556c64..2194bd986a891 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml @@ -1,9 +1,11 @@ --- - job: - name: "elastic+elasticsearch+pull-request+bwc" + name: "elastic+elasticsearch+pull-request+bwc-snapshots" display-name: "elastic / elasticsearch - pull request bwc" description: "Testing of Elasticsearch pull requests - bwc" - workspace: "/dev/shm/elastic+elasticsearch+pull-request+bwc" + project-type: matrix + node: master + child-workspace: "/dev/shm/elastic+elasticsearch+pull-request+bwc" scm: - git: refspec: "+refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*" @@ -23,6 +25,16 @@ black-list-labels: - 
'>test-mute' - 'test-full-bwc' + axes: + - axis: + type: slave + name: nodes + values: + - "general-purpose" + - axis: + type: yaml + filename: ".ci/snapshotBwcVersions" + name: "BWC_VERSION" builders: - inject: properties-file: '.ci/java-versions.properties' @@ -34,4 +46,4 @@ JAVA16_HOME=$HOME/.java/openjdk16 - shell: | #!/usr/local/bin/runbld --redirect-stderr - $WORKSPACE/.ci/scripts/run-gradle.sh -Dignore.tests.seed bwcTestSnapshots + $WORKSPACE/.ci/scripts/run-gradle.sh -Dignore.tests.seed v$BWC_VERSION#bwcTest diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions new file mode 100644 index 0000000000000..3fdb4a121405a --- /dev/null +++ b/.ci/snapshotBwcVersions @@ -0,0 +1,5 @@ +BWC_VERSION: + - "7.17.1" + - "8.0.1" + - "8.1.0" + - "8.2.0" diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000000..1a965ee4b6eb0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +# Elasticsearch Changelog + +Please see the [release notes](https://www.elastic.co/guide/en/elasticsearch/reference/current/es-release-notes.html) in the reference manual. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fc9935ca69794..287b28c5718e1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -112,11 +112,11 @@ Contributing to the Elasticsearch codebase **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch) -JDK 16 is required to build Elasticsearch. You must have a JDK 16 installation +JDK 17 is required to build Elasticsearch. You must have a JDK 17 installation with the environment variable `JAVA_HOME` referencing the path to Java home for -your JDK 16 installation. By default, tests use the same runtime as `JAVA_HOME`. +your JDK 17 installation. By default, tests use the same runtime as `JAVA_HOME`. However, since Elasticsearch supports JDK 11, the build supports compiling with -JDK 16 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` +JDK 17 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of a JDK 11 installation. Note that this mechanism can be used to test against other JDKs as well, this is not only limited to JDK 11. @@ -151,9 +151,9 @@ and then run `curl` in another window like this: ### Importing the project into IntelliJ IDEA The minimum IntelliJ IDEA version required to import the Elasticsearch project is 2020.1 -Elasticsearch builds using Java 16. When importing into IntelliJ you will need +Elasticsearch builds using Java 17. When importing into IntelliJ you will need to define an appropriate SDK. The convention is that **this SDK should be named -"16"** so that the project import will detect it automatically. For more details +"17"** so that the project import will detect it automatically. For more details on defining an SDK in IntelliJ please refer to [their documentation](https://www.jetbrains.com/help/idea/sdk.html#define-sdk). SDK definitions are global, so you can add the JDK from any project, or after project import. Importing with a missing JDK will still work, IntelliJ will diff --git a/benchmarks/README.md b/benchmarks/README.md index a4d238c343c1c..9e86dd49b60f6 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -5,7 +5,7 @@ This directory contains the microbenchmark suite of Elasticsearch.
It relies on ## Purpose We do not want to microbenchmark everything but the kitchen sink and should typically rely on our -[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/app/kibana#/dashboard/Nightly-Benchmark-Overview) with +[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/) with [Rally](http://github.com/elastic/rally). Microbenchmarks are intended to spot performance regressions in performance-critical components. The microbenchmark suite is also handy for ad-hoc microbenchmarks but please remove them again before merging your PR. diff --git a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json index de460d770d249..eea13dec75ffd 100644 --- a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json +++ b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json @@ -1233,6 +1233,11 @@ "total" : 0, "failed" : 0 } + }, + "archive" : { + "available" : false, + "enabled" : true, + "indices_count" : 0 } } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java index 1702c03f91177..510a8df411285 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java @@ -43,7 +43,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor ); } String qualifier = providers.systemProperty("build.version_qualifier") - .forUseAtConfigurationTime() .getOrElse(""); if (qualifier.isEmpty() == false) { if (qualifier.matches("(alpha|beta|rc)\\d+") == false) { @@ -52,7 +51,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor elasticsearch += "-" + qualifier; } final String buildSnapshotSystemProperty = providers.systemProperty("build.snapshot") - .forUseAtConfigurationTime() .getOrElse("true"); switch (buildSnapshotSystemProperty) { case "true": diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java index 2d56e42e3d951..c4c664df46bc3 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java @@ -32,6 +32,6 @@ public void apply(Project project) { .registerIfAbsent("versions", VersionPropertiesBuildService.class, spec -> { spec.getParameters().getInfoPath().set(infoPath); }); - project.getExtensions().add("versions", serviceProvider.forUseAtConfigurationTime().get().getProperties()); + project.getExtensions().add("versions", serviceProvider.get().getProperties()); } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java index 7359d1728b96c..53b48c0c58935 100644 ---
a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java @@ -78,7 +78,7 @@ public static int findDefaultParallel(Project project) { } private static boolean isMac(ProviderFactory providers) { - return providers.systemProperty("os.name").forUseAtConfigurationTime().getOrElse("").startsWith("Mac"); + return providers.systemProperty("os.name").getOrElse("").startsWith("Mac"); } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java index 45b6b1d142963..414d6c4762e44 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java @@ -25,6 +25,7 @@ import org.gradle.api.file.FileCollection; import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -68,6 +69,7 @@ public LicenseHeadersTask() { * constructor can write to it. */ @InputFiles + @IgnoreEmptyDirectories @SkipWhenEmpty @PathSensitive(PathSensitivity.RELATIVE) public List getJavaFiles() { diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 diff --git a/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios b/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios index 27ff2443e7371..9c7c7abdb8394 100644 --- a/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios +++ b/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios @@ -69,10 +69,9 @@ precommit_master { single_project_branch { title = "single project (@testGitCommit@)" - cleanup-tasks = [":server:clean"] - tasks = [":server:spotlessApply", ":server:precommit"] + tasks = [":server:precommit"] gradle-args = ["--no-scan"] - apply-abi-change-to = "server/src/main/java/org/elasticsearch/Build.java" + apply-abi-change-to = "server/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java" run-using = cli // value can be "cli" or "tooling-api" daemon = warm // value can be "warm", "cold", or "none" warm-ups = 5 @@ -87,10 +86,9 @@ single_project_branch { single_project_master { title = "single project (master)" - cleanup-tasks = [":server:clean"] - tasks = [":server:spotlessApply", ":server:precommit"] + tasks = [":server:precommit"] gradle-args = ["--no-scan"] - 
apply-abi-change-to = "server/src/main/java/org/elasticsearch/Build.java" + apply-abi-change-to = "server/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java" run-using = cli // value can be "cli" or "tooling-api" daemon = warm // value can be "warm", "cold", or "none" warm-ups = 5 diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy index ee5ff76aa9c41..0c02b6baefe25 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy @@ -30,7 +30,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { def "yamlRestTestVxCompatTest does nothing when there are no tests"() { given: - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -53,11 +53,11 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestTestVxCompatTest executes and copies api and transforms tests from :bwc:bugfix"() { + def "yamlRestTestVxCompatTest executes and copies api and transforms tests from :bwc:maintenance"() { given: internalBuild() - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -90,8 +90,8 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/bugfix/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/bugfix/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/maintenance/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: def result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest").build() @@ -136,7 +136,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { def "yamlRestTestVxCompatTest is wired into check and checkRestCompat"() { given: - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -180,7 +180,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { given: internalBuild() - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -224,7 +224,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { setupRestResources([], []) - file("distribution/bwc/bugfix/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + 
file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: diff --git a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle index b52e6ec7f005c..102a838235cb1 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle @@ -10,10 +10,8 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask def clusterCredentials = [ username: providers.systemProperty('tests.rest.cluster.username') - .forUseAtConfigurationTime() .getOrElse('test_admin'), password: providers.systemProperty('tests.rest.cluster.password') - .forUseAtConfigurationTime() .getOrElse('x-pack-test-password') ] diff --git a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle index 2c20d79fac711..e67cb7846c791 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle @@ -8,16 +8,15 @@ // we do not want any of these dependencies on the compilation classpath // because they could then be used within Elasticsearch -List FORBIDDEN_DEPENDENCIES = [ - 'guava' +List FORBIDDEN_DEPENDENCY_GROUPS = [ + 'com.google.guava' ] Closure checkDeps = { Configuration configuration -> configuration.resolutionStrategy.eachDependency { - String artifactName = it.target.name - if (FORBIDDEN_DEPENDENCIES.contains(artifactName)) { - throw new GradleException("Dependency '${artifactName}' on configuration '${configuration.name}' is not allowed. " + - "If it is needed as a transitive depenency, try adding it to the runtime classpath") + if (FORBIDDEN_DEPENDENCY_GROUPS.contains(it.target.group)) { + throw new GradleException("Dependency '${it.target.group}:${it.target.name}' on configuration '${configuration.name}' is not allowed. 
" + + "If it is needed as a transitive dependency, try adding it to the runtime classpath") } } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 84b35b9a7568c..31638c9ddb1d4 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -69,7 +69,7 @@ tasks.register('configureIdeCheckstyle') { } // Applying this stuff, particularly the idea-ext plugin, has a cost so avoid it unless we're running in the IDE -if (providers.systemProperty('idea.active').forUseAtConfigurationTime().getOrNull() == 'true') { +if (providers.systemProperty('idea.active').getOrNull() == 'true') { project.apply(plugin: org.jetbrains.gradle.ext.IdeaExtPlugin) tasks.register('configureIdeaGradleJvm') { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle index 4eb4cdcdc32d8..b49c302e6ab99 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle @@ -14,9 +14,9 @@ import org.elasticsearch.gradle.testclusters.RunTask // apply plugin: 'elasticsearch.internal-testclusters' testClusters.register("runTask") { - testDistribution = providers.systemProperty('run.distribution').orElse('default').forUseAtConfigurationTime().get() - if (providers.systemProperty('run.distribution').forUseAtConfigurationTime().getOrElse('default') == 'default') { - String licenseType = providers.systemProperty("run.license_type").forUseAtConfigurationTime().getOrElse("basic") + testDistribution = providers.systemProperty('run.distribution').orElse('default').get() + if (providers.systemProperty('run.distribution').getOrElse('default') == 'default') { + String licenseType = providers.systemProperty("run.license_type").getOrElse("basic") if (licenseType == 'trial') { setting 'xpack.ml.enabled', 'true' setting 'xpack.graph.enabled', 'true' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java index 1dce3a7092d85..eda600f09004c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java @@ -84,25 +84,20 @@ public void apply(Project project) { String remoteRepo = remote.get(); // for testing only we can override the base remote url String remoteRepoUrl = providerFactory.systemProperty("testRemoteRepo") - .forUseAtConfigurationTime() .getOrElse("https://github.com/" + remoteRepo + "/elasticsearch.git"); addRemote.setCommandLine(asList("git", "remote", "add", remoteRepo, remoteRepoUrl)); }); TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> { - var gitFetchLatest = project.getProviders() - .systemProperty("tests.bwc.git_fetch_latest") - .forUseAtConfigurationTime() - .orElse("true") - .map(fetchProp -> { - if ("true".equals(fetchProp)) { - return true; - } - if ("false".equals(fetchProp)) { - return false; - } - throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); - }); + var gitFetchLatest = project.getProviders().systemProperty("tests.bwc.git_fetch_latest").orElse("true").map(fetchProp -> { + if
("true".equals(fetchProp)) { + return true; + } + if ("false".equals(fetchProp)) { + return false; + } + throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); + }); fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get()); fetchLatest.dependsOn(addRemoteTaskProvider); fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index b23fb215bcffc..53b1fec01cd8f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -23,7 +23,6 @@ import java.io.File; import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize; -import static org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT; /** * Provides a DSL and common configurations to define different types of @@ -75,12 +74,14 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); var extractedConfiguration = sub.getConfigurations().create(EXTRACTED_CONFIGURATION_NAME); extractedConfiguration.setCanBeResolved(false); - extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask()); // The "composite" configuration is specifically used for resolving transformed artifacts in an included build var compositeConfiguration = sub.getConfigurations().create(COMPOSITE_CONFIGURATION_NAME); compositeConfiguration.setCanBeResolved(false); - compositeConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + compositeConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); compositeConfiguration.getAttributes().attribute(Attribute.of("composite", Boolean.class), true); sub.getArtifacts().add(COMPOSITE_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); sub.getTasks().register("extractedAssemble", task -> diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index 1d0eff0b3aa70..cc8348d424e58 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.testclusters.TestClustersPlugin; import org.gradle.api.Plugin; @@ -31,6 +32,10 @@ public void apply(Project project) { project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class); TestClustersPlugin 
testClustersPlugin = project.getPlugins().apply(TestClustersPlugin.class); testClustersPlugin.setRuntimeJava(providerFactory.provider(() -> BuildParams.getRuntimeJavaHome())); + testClustersPlugin.setIsReleasedVersion( + version -> (version.equals(VersionProperties.getElasticsearchVersion()) && BuildParams.isSnapshotBuild() == false) + || BuildParams.getBwcVersions().unreleasedInfo(version) == null + ); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index a1008babb3987..2bc84b36c8fa1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -19,7 +19,6 @@ import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import java.util.Arrays; @@ -40,10 +39,10 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE) .attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX)); }); @@ -51,10 +50,10 @@ public void apply(Project project) { ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()) .attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); @@ -65,7 +64,7 @@ public void apply(Project project) { NamedDomainObjectContainer jdksContainer = project.container(Jdk.class, name -> { Configuration configuration = project.getConfigurations().create("jdk_" + name); configuration.setCanBeConsumed(false); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); configuration.getAttributes().attribute(jdkAttribute, true); Jdk jdk = new Jdk(name, configuration, project.getObjects()); configuration.defaultDependencies(dependencies -> { diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index f8c9e2a86261c..be44f003bf036 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -269,7 +269,7 @@ private File findRuntimeJavaHome() { } private String findJavaHome(String version) { - Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv").forUseAtConfigurationTime(); + Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv"); String javaHomeEnvVar = getJavaHomeEnvVarName(version); // Provide a useful error if we're looking for a Java home version that we haven't told Gradle about yet diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java new file mode 100644 index 0000000000000..e158dd7c755c9 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.precommit; + +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; + +import org.gradle.api.file.FileTree; +import org.gradle.api.tasks.IgnoreEmptyDirectories; + +/** + * This implementation is used to fix gradle 8 compatibility of + * the CheckForbiddenApis task which is built with gradle 4 support + * in mind. + * */ +public class CheckForbiddenApisTask extends CheckForbiddenApis { + + /** + * Add additional annotation to make this input gradle 8 compliant. 
+ * Otherwise we see a deprecation warning here starting with gradle 7.4 + * */ + @Override + @IgnoreEmptyDirectories + public FileTree getClassFiles() { + return super.getClassFiles(); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java index ee3e58fd0552e..bc53358d0a507 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java @@ -14,6 +14,7 @@ import org.gradle.api.file.FileTree; import org.gradle.api.file.ProjectLayout; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.OutputFile; @@ -75,6 +76,7 @@ private static boolean isExecutableFile(File file) { * Returns the files this task will check */ @InputFiles + @IgnoreEmptyDirectories @SkipWhenEmpty public FileCollection getFiles() { return getSources().get() diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java index 10efa35695cd4..99c36ebabcd1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java @@ -8,19 +8,19 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; -import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin; +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension; import groovy.lang.Closure; import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.InternalPlugin; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; -import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.plugins.ExtraPropertiesExtension; -import org.gradle.api.tasks.SourceSet; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.specs.Specs; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskProvider; @@ -29,10 +29,24 @@ import java.util.List; import java.util.Set; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_EXTENSION_NAME; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; + public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin implements InternalPlugin { @Override public TaskProvider createTask(Project project) { - project.getPluginManager().apply(ForbiddenApisPlugin.class); + project.getPluginManager().apply(JavaBasePlugin.class); + + // create Extension for defaults: + var checkForbiddenApisExtension = project.getExtensions() + .create(FORBIDDEN_APIS_EXTENSION_NAME, CheckForbiddenApisExtension.class, project); + + // Create a convenience task for all checks (this does not conflict with extension, as it 
has higher priority in DSL): + var forbiddenTask = project.getTasks() + .register(FORBIDDEN_APIS_TASK_NAME, task -> { task.setDescription("Runs forbidden-apis checks."); }); + + JavaPluginExtension javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class); + // Define our tasks (one for each SourceSet): TaskProvider resourcesTask = project.getTasks() .register("forbiddenApisResources", ExportElasticsearchBuildResourcesTask.class); @@ -47,76 +61,69 @@ public TaskProvider createTask(Project project) { t.copy("forbidden/es-server-signatures.txt"); t.copy("forbidden/snakeyaml-signatures.txt"); }); - project.getTasks().withType(CheckForbiddenApis.class).configureEach(t -> { - t.dependsOn(resourcesTask); - assert t.getName().startsWith(ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME); - String sourceSetName; - if (ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME.equals(t.getName())) { - sourceSetName = "main"; - } else { - // parse out the sourceSetName - char[] chars = t.getName().substring(ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME.length()).toCharArray(); - chars[0] = Character.toLowerCase(chars[0]); - sourceSetName = new String(chars); - } - - SourceSetContainer sourceSets = GradleUtils.getJavaSourceSets(project); - SourceSet sourceSet = sourceSets.getByName(sourceSetName); - t.setClasspath(project.files(sourceSet.getRuntimeClasspath()).plus(sourceSet.getCompileClasspath())); - - t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); - t.setSignaturesFiles( - project.files( - resourcesDir.resolve("forbidden/jdk-signatures.txt"), - resourcesDir.resolve("forbidden/es-all-signatures.txt"), - resourcesDir.resolve("forbidden/jdk-deprecated.txt") - ) - ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); - if (t.getName().endsWith("Test")) { + project.getExtensions().getByType(SourceSetContainer.class).configureEach(sourceSet -> { + String sourceSetTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); + var sourceSetTask = project.getTasks().register(sourceSetTaskName, CheckForbiddenApisTask.class, t -> { + t.setDescription("Runs forbidden-apis checks on '${sourceSet.name}' classes."); + t.dependsOn(sourceSet.getOutput()); + t.getOutputs().upToDateWhen(Specs.SATISFIES_ALL); + t.setClassesDirs(sourceSet.getOutput().getClassesDirs()); + t.dependsOn(resourcesTask); + t.setClasspath(project.files(sourceSet.getRuntimeClasspath()).plus(sourceSet.getCompileClasspath())); + t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); + t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); t.setSignaturesFiles( - t.getSignaturesFiles() - .plus( - project.files( - resourcesDir.resolve("forbidden/es-test-signatures.txt"), - resourcesDir.resolve("forbidden/http-signatures.txt") - ) - ) + project.files( + resourcesDir.resolve("forbidden/jdk-signatures.txt"), + resourcesDir.resolve("forbidden/es-all-signatures.txt"), + resourcesDir.resolve("forbidden/jdk-deprecated.txt") + ) ); - } else { - t.setSignaturesFiles( - t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) - ); - } - ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); - ext.set("replaceSignatureFiles", new Closure(t) { - @Override - public Void call(Object... 
names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(project.files(resources)); - return null; + t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + if (t.getName().endsWith("Test")) { + t.setSignaturesFiles( + t.getSignaturesFiles() + .plus( + project.files( + resourcesDir.resolve("forbidden/es-test-signatures.txt"), + resourcesDir.resolve("forbidden/http-signatures.txt") + ) + ) + ); + } else { + t.setSignaturesFiles( + t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) + ); } + ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); + ext.set("replaceSignatureFiles", new Closure(t) { + @Override + public Void call(Object... names) { + List resources = new ArrayList<>(names.length); + for (Object name : names) { + resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + } + t.setSignaturesFiles(project.files(resources)); + return null; + } - }); - ext.set("addSignatureFiles", new Closure(t) { - @Override - public Void call(Object... names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + }); + ext.set("addSignatureFiles", new Closure(t) { + @Override + public Void call(Object... names) { + List resources = new ArrayList<>(names.length); + for (Object name : names) { + resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + } + t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); + return null; } - t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); - return null; - } + }); + }); + forbiddenTask.configure(t -> t.dependsOn(sourceSetTask)); }); - TaskProvider forbiddenApis = project.getTasks().named("forbiddenApis"); - forbiddenApis.configure(t -> t.setGroup("")); - return forbiddenApis; + return forbiddenTask; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java index e4d732149e0eb..32059eefb8683 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java @@ -16,6 +16,7 @@ import org.gradle.api.provider.ListProperty; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -90,6 +91,7 @@ public ForbiddenPatternsTask(ProjectLayout projectLayout) { } @InputFiles + @IgnoreEmptyDirectories @PathSensitive(PathSensitivity.RELATIVE) @SkipWhenEmpty public FileCollection getFiles() { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java index 8d03ff609d685..bfb53c23b5f1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java @@ -44,6 +44,7 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -417,6 +418,7 @@ public DirectoryProperty getOutputDirectory() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getTestFiles() { return sourceDirectory.getAsFileTree().matching(testPatternSet); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java index e12ec5836c1d7..f20abf6018777 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java @@ -81,7 +81,8 @@ public void apply(Project project) { // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); - Dependency bwcMinor = project.getDependencies().project(Map.of("path", ":distribution:bwc:bugfix", "configuration", "checkout")); + Dependency bwcMinor = project.getDependencies() + .project(Map.of("path", ":distribution:bwc:maintenance", "configuration", "checkout")); project.getDependencies().add(bwcMinorConfig.getName(), bwcMinor); Provider copyCompatYamlSpecTask = project.getTasks() diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 16e7328ea98ff..0c916951bcd1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -35,7 +35,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.provider.Provider; import org.gradle.api.specs.Specs; @@ -313,7 +312,7 @@ private static Object convertPath( private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); - examplePlugin.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + examplePlugin.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); DependencyHandler deps = project.getDependencies(); deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(Map.of("path", ":plugins:analysis-icu", "configuration", "zip"))); return examplePlugin; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java index 1d5d0078e771a..20cf4328e4c84 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java @@ -78,6 +78,6 @@ public void apply(Project project) { } private String systemProperty(String propName) { - return providerFactory.systemProperty(propName).forUseAtConfigurationTime().getOrNull(); + return providerFactory.systemProperty(propName).getOrNull(); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java index e6a4a0a7a1397..5c00e0428c9b7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java @@ -15,6 +15,7 @@ import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -86,6 +87,7 @@ public boolean isSkipHasRestTestCheck() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getInputDir() { FileTree coreFileTree = null; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java index 4513c64d91183..5cc68f8e73d45 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java @@ -16,6 +16,7 @@ import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; @@ -98,6 +99,7 @@ public Map getSubstitutions() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getInputDir() { FileTree coreFileTree = null; diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index c6db724bfd030..9904c66141eab 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -7.3.3 \ No newline at end of file +7.4 \ No newline at end of file diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 9de5d161116f0..d08dc469e5ba5 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -18,7 +18,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import 
org.gradle.api.provider.Provider; @@ -61,14 +60,14 @@ public void setDockerAvailability(Provider dockerAvailability) { @Override public void apply(Project project) { project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); setupResolutionsContainer(project); @@ -80,7 +79,8 @@ private void setupDistributionContainer(Project project, Property docke distributionsContainer = project.container(ElasticsearchDistribution.class, name -> { Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name); Configuration extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name); - extractedConfiguration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); return new ElasticsearchDistribution( name, project.getObjects(), diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/Version.java b/build-tools/src/main/java/org/elasticsearch/gradle/Version.java index dfa8be295a8f7..9367b38548b6c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/Version.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/Version.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.gradle; +import java.io.Serializable; import java.util.Objects; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -14,7 +15,7 @@ /** * Encapsulates comparison and printing logic for an x.y.z version. 
*/ -public final class Version implements Comparable { +public final class Version implements Comparable, Serializable { private final int major; private final int minor; private final int revision; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 2dedd25c007f5..938f5e8c8ad25 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -32,7 +32,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; @@ -236,7 +235,7 @@ public Object doCall() { // also make the zip available as a configuration (used when depending on this project) Configuration configuration = project.getConfigurations().create("zip"); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); project.getArtifacts().add("zip", bundle); return bundle; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java index 59144576333f2..8e86973826830 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java @@ -24,7 +24,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSet; @@ -53,16 +52,16 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.JAR_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.JAR_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.JAR_TYPE) .attribute(restAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(restAttribute, true); }); ConfigurationContainer configurations = project.getConfigurations(); Configuration restTestSpecs = configurations.create(REST_TEST_SPECS_CONFIGURATION_NAME); - restTestSpecs.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + restTestSpecs.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); restTestSpecs.getAttributes().attribute(restAttribute, true); TaskProvider copyRestTestSpecs = project.getTasks().register("copyRestTestSpecs", Copy.class, t -> { diff --git 
a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index 5d76824a81a0c..e956895f34bce 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -10,6 +10,7 @@ import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.PropertyNormalization; import org.elasticsearch.gradle.ReaperService; +import org.elasticsearch.gradle.Version; import org.gradle.api.Named; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; @@ -61,6 +62,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; private final Provider runtimeJava; + private final Function isReleasedVersion; private int nodeIndex = 0; public ElasticsearchCluster( @@ -73,7 +75,8 @@ public ElasticsearchCluster( ExecOperations execOperations, FileOperations fileOperations, File workingDirBase, - Provider runtimeJava + Provider runtimeJava, + Function isReleasedVersion ) { this.path = path; this.clusterName = clusterName; @@ -85,6 +88,7 @@ public ElasticsearchCluster( this.fileOperations = fileOperations; this.workingDirBase = workingDirBase; this.runtimeJava = runtimeJava; + this.isReleasedVersion = isReleasedVersion; this.nodes = project.container(ElasticsearchNode.class); this.nodes.add( new ElasticsearchNode( @@ -98,7 +102,8 @@ public ElasticsearchCluster( execOperations, fileOperations, workingDirBase, - runtimeJava + runtimeJava, + isReleasedVersion ) ); @@ -131,7 +136,8 @@ public void setNumberOfNodes(int numberOfNodes) { execOperations, fileOperations, workingDirBase, - runtimeJava + runtimeJava, + isReleasedVersion ) ); } @@ -401,6 +407,16 @@ public void rolesFile(File rolesYml) { nodes.all(node -> node.rolesFile(rolesYml)); } + @Override + public void requiresFeature(String feature, Version from) { + nodes.all(node -> node.requiresFeature(feature, from)); + } + + @Override + public void requiresFeature(String feature, Version from, Version until) { + nodes.all(node -> node.requiresFeature(feature, from, until)); + } + private void writeUnicastHostsFiles() { String unicastUris = nodes.stream().flatMap(node -> node.getAllTransportPortURI().stream()).collect(Collectors.joining("\n")); nodes.forEach(node -> { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 9704fce7b929d..a47807ae6d326 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -37,7 +37,6 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.internal.file.FileOperations; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; @@ -148,6 +147,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final LazyPropertyList extraJarConfigurations = new LazyPropertyList<>("Extra jar files", this); private final List<Map<String, String>> credentials = new ArrayList<>(); private final
List roleFiles = new ArrayList<>(); + private final List featureFlags = new ArrayList<>(); final LinkedHashMap defaultConfig = new LinkedHashMap<>(); private final Path confPathRepo; @@ -159,6 +159,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final Path esStdinFile; private final Path tmpDir; private final Provider runtimeJava; + private final Function isReleasedVersion; private int currentDistro = 0; private TestDistribution testDistribution; @@ -185,7 +186,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { ExecOperations execOperations, FileOperations fileOperations, File workingDirBase, - Provider runtimeJava + Provider runtimeJava, + Function isReleasedVersion ) { this.clusterName = clusterName; this.path = path; @@ -197,6 +199,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.execOperations = execOperations; this.fileOperations = fileOperations; this.runtimeJava = runtimeJava; + this.isReleasedVersion = isReleasedVersion; workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath(); confPathRepo = workingDir.resolve("repo"); configFile = workingDir.resolve("config/elasticsearch.yml"); @@ -339,7 +342,7 @@ public void module(Provider module) { private void registerExtractedConfig(Provider pluginProvider) { Dependency pluginDependency = this.project.getDependencies().create(project.files(pluginProvider)); Configuration extractedConfig = project.getConfigurations().detachedConfiguration(pluginDependency); - extractedConfig.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfig.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); extractedConfig.getAttributes().attribute(bundleAttribute, true); pluginAndModuleConfiguration.from(extractedConfig); } @@ -349,10 +352,10 @@ private void configureArtifactTransforms() { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE) .attribute(bundleAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(bundleAttribute, true); transformSpec.getParameters().setAsFiletreeOutput(true); }); @@ -773,6 +776,16 @@ public void rolesFile(File rolesYml) { roleFiles.add(rolesYml); } + @Override + public void requiresFeature(String feature, Version from) { + featureFlags.add(new FeatureFlag(feature, from, null)); + } + + @Override + public void requiresFeature(String feature, Version from, Version until) { + featureFlags.add(new FeatureFlag(feature, from, until)); + } + private void runElasticsearchBinScriptWithInput(String input, String tool, CharSequence... 
args) { if (Files.exists(getDistroDir().resolve("bin").resolve(tool)) == false && Files.exists(getDistroDir().resolve("bin").resolve(tool + ".bat")) == false) { @@ -820,19 +833,30 @@ private Map getESEnvironment() { defaultEnv.put("ES_PATH_CONF", configFile.getParent().toString()); String systemPropertiesString = ""; if (systemProperties.isEmpty() == false) { - systemPropertiesString = " " - + systemProperties.entrySet() - .stream() - .map(entry -> "-D" + entry.getKey() + "=" + entry.getValue()) - // ES_PATH_CONF is also set as an environment variable and for a reference to ${ES_PATH_CONF} - // to work ES_JAVA_OPTS, we need to make sure that ES_PATH_CONF before ES_JAVA_OPTS. Instead, - // we replace the reference with the actual value in other environment variables - .map(p -> p.replace("${ES_PATH_CONF}", configFile.getParent().toString())) - .collect(Collectors.joining(" ")); + systemPropertiesString = " " + systemProperties.entrySet().stream().peek(entry -> { + if (entry.getKey().contains("feature_flag")) { + throw new TestClustersException("Invalid system property `" + entry.getKey() + "`. Use `requiresFeature` instead."); + } + }) + .map(entry -> "-D" + entry.getKey() + "=" + entry.getValue()) + // ES_PATH_CONF is also set as an environment variable and for a reference to ${ES_PATH_CONF} + // to work ES_JAVA_OPTS, we need to make sure that ES_PATH_CONF before ES_JAVA_OPTS. Instead, + // we replace the reference with the actual value in other environment variables + .map(p -> p.replace("${ES_PATH_CONF}", configFile.getParent().toString())) + .collect(Collectors.joining(" ")); } if (systemProperties.containsKey("io.netty.leakDetection.level") == false) { systemPropertiesString = systemPropertiesString + " -Dio.netty.leakDetection.level=paranoid"; } + + String featureFlagsString = ""; + if (featureFlags.isEmpty() == false && isReleasedVersion.apply(getVersion())) { + featureFlagsString = featureFlags.stream() + .filter(f -> getVersion().onOrAfter(f.getFrom()) && (f.getUntil() == null || getVersion().before(f.getUntil()))) + .map(f -> "-D" + f.getFeature() + "=true") + .collect(Collectors.joining(" ")); + } + String jvmArgsString = ""; if (jvmArgs.isEmpty() == false) { jvmArgsString = " " + jvmArgs.stream().peek(argument -> { @@ -846,8 +870,19 @@ private Map getESEnvironment() { String heapSize = System.getProperty("tests.heap.size", "512m"); defaultEnv.put( "ES_JAVA_OPTS", - "-Xms" + heapSize + " -Xmx" + heapSize + " -ea -esa " + systemPropertiesString + " " + jvmArgsString + " " + - // Support passing in additional JVM arguments + "-Xms" + + heapSize + + " -Xmx" + + heapSize + + " -ea -esa " + + systemPropertiesString + + " " + + featureFlagsString + + " " + + jvmArgsString + + " " + + + // Support passing in additional JVM arguments System.getProperty("tests.jvm.argline", "") ); defaultEnv.put("ES_TMPDIR", tmpDir.toString()); @@ -1466,6 +1501,11 @@ public List getExtraConfigFiles() { return extraConfigFiles.getNormalizedCollection(); } + @Nested + public List getFeatureFlags() { + return featureFlags; + } + @Override @Internal public boolean isProcessAlive() { @@ -1599,6 +1639,34 @@ public CharSequence[] getArgs() { } } + private static class FeatureFlag { + private final String feature; + private final Version from; + private final Version until; + + public FeatureFlag(String feature, Version from, Version until) { + this.feature = feature; + this.from = from; + this.until = until; + } + + @Input + public String getFeature() { + return feature; + } + + @Input + public Version getFrom() 
{ + return from; + } + + @Input + @Optional + public Version getUntil() { + return until; + } + } + private static class LinkCreationException extends UncheckedIOException { LinkCreationException(String message, IOException cause) { super(message, cause); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java index 1d4a377cb302d..126890629e60b 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java @@ -9,6 +9,7 @@ import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.PropertyNormalization; +import org.elasticsearch.gradle.Version; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; import org.gradle.api.logging.Logging; @@ -97,6 +98,10 @@ public interface TestClusterConfiguration { void rolesFile(File rolesYml); + void requiresFeature(String feature, Version from); + + void requiresFeature(String feature, Version from, Version until); + String getHttpSocketURI(); String getTransportPortURI(); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index a46453efbc942..2253498b47f72 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.gradle.DistributionDownloadPlugin; import org.elasticsearch.gradle.ReaperPlugin; import org.elasticsearch.gradle.ReaperService; +import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; @@ -30,6 +31,7 @@ import org.gradle.process.ExecOperations; import java.io.File; +import java.util.function.Function; import javax.inject.Inject; @@ -45,6 +47,7 @@ public class TestClustersPlugin implements Plugin { private static final Logger logger = Logging.getLogger(TestClustersPlugin.class); private final ProviderFactory providerFactory; private Provider runtimeJavaProvider; + private Function isReleasedVersion = v -> true; @Inject protected FileSystemOperations getFileSystemOperations() { @@ -75,6 +78,10 @@ public void setRuntimeJava(Provider runtimeJava) { this.runtimeJavaProvider = runtimeJava; } + public void setIsReleasedVersion(Function isReleasedVersion) { + this.isReleasedVersion = isReleasedVersion; + } + @Override public void apply(Project project) { project.getPlugins().apply(DistributionDownloadPlugin.class); @@ -124,7 +131,8 @@ private NamedDomainObjectContainer createTestClustersConta getExecOperations(), getFileOperations(), new File(project.getBuildDir(), "testclusters"), - runtimeJavaProvider + runtimeJavaProvider, + isReleasedVersion ); }); project.getExtensions().add(EXTENSION_NAME, container); diff --git a/build.gradle b/build.gradle index 978d2fefee794..d62c6358e4cd4 100644 --- a/build.gradle +++ b/build.gradle @@ -68,17 +68,28 @@ ext.testArtifact = { p, String name = "test" -> } tasks.register("updateCIBwcVersions") { - doLast { - File yml = file(".ci/bwcVersions") - yml.text = "" - yml << "BWC_VERSION:\n" - BuildParams.bwcVersions.indexCompatible.each { - yml << " - \"$it\"\n" 
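
The testclusters changes above replace raw feature-flag system properties (any `systemProperty` whose key contains "feature_flag" is now rejected with a TestClustersException pointing at `requiresFeature`) with a declarative per-cluster API. A minimal usage sketch follows; the cluster name and flag name are hypothetical, and only `requiresFeature` and `Version.fromString` come from the change itself:

    import org.elasticsearch.gradle.Version

    testClusters {
      integTest {
        // Hypothetical flag name, for illustration only. On released versions
        // in [8.1.0, 8.3.0) the node is started with
        // -Des.example_feature_flag_registered=true appended to ES_JAVA_OPTS;
        // when isReleasedVersion returns false (snapshot builds, where feature
        // flags are presumably registered by default) nothing is added.
        requiresFeature('es.example_feature_flag_registered', Version.fromString("8.1.0"), Version.fromString("8.3.0"))
      }
    }

The two-argument overload leaves `until` null, which the filter in getESEnvironment treats as open-ended: a flag applies from `from` (inclusive, via onOrAfter) up to but excluding `until` (via before).
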
+ def writeVersions = { File file, List versions -> + file.text = "" + file << "BWC_VERSION:\n" + versions.each { + file << " - \"$it\"\n" } } + doLast { + writeVersions(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + writeVersions(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) + } } tasks.register("verifyVersions") { + def verifyCiYaml = { File file, List versions -> + String ciYml = file.text + versions.each { + if (ciYml.contains("\"$it\"\n") == false) { + throw new Exception("${file} is outdated, run `./gradlew updateCIBwcVersions` and check in the results") + } + } + } doLast { if (gradle.startParameter.isOffline()) { throw new GradleException("Must run in online mode to verify versions") @@ -94,12 +105,8 @@ tasks.register("verifyVersions") { .collect { Version.fromString(it) } ) } - String ciYml = file(".ci/bwcVersions").text - BuildParams.bwcVersions.indexCompatible.each { - if (ciYml.contains("\"$it\"\n") == false) { - throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results"); - } - } + verifyCiYaml(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + verifyCiYaml(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) // Make sure backport bot config file is up to date JsonNode backportConfig = new ObjectMapper().readTree(file(".backportrc.json")) @@ -177,8 +184,8 @@ allprojects { // injecting groovy property variables into all projects project.ext { // for ide hacks... - isEclipse = providers.systemProperty("eclipse.launcher").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from Eclipse's IDE - providers.systemProperty("eclipse.application").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from the Eclipse compiler server + isEclipse = providers.systemProperty("eclipse.launcher").isPresent() || // Detects gradle launched from Eclipse's IDE + providers.systemProperty("eclipse.application").isPresent() || // Detects gradle launched from the Eclipse compiler server gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff gradle.startParameter.taskNames.contains('cleanEclipse') } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 1da2f9ae57f6a..288caec3ce183 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -64,11 +64,9 @@ File pkiTrustCert = file("./src/test/resources/org/elasticsearch/client/security def clusterUserNameProvider = providers.systemProperty('tests.rest.cluster.username') .orElse('test_user') - .forUseAtConfigurationTime() def clusterPasswordProvider = providers.systemProperty('tests.rest.cluster.password') .orElse('test-user-password') - .forUseAtConfigurationTime() tasks.named('splitPackagesAudit').configure { // the client package should be owned by the client, but server has some classes there too diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java deleted file mode 100644 index a9a119da79bab..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.cluster.RemoteInfoRequest; -import org.elasticsearch.client.cluster.RemoteInfoResponse; -import org.elasticsearch.client.indices.ComponentTemplatesExistRequest; -import org.elasticsearch.client.indices.DeleteComponentTemplateRequest; -import org.elasticsearch.client.indices.GetComponentTemplatesRequest; -import org.elasticsearch.client.indices.GetComponentTemplatesResponse; -import org.elasticsearch.client.indices.PutComponentTemplateRequest; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; - -import static java.util.Collections.emptySet; -import static java.util.Collections.singleton; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Cluster API. - *
<p>
- * See Cluster API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class ClusterClient { - private final RestHighLevelClient restHighLevelClient; - - ClusterClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Updates cluster wide specific settings using the Cluster Update Settings API. - * See Cluster Update Settings - * API on elastic.co - * @param clusterUpdateSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterUpdateSettingsResponse putSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - clusterUpdateSettingsRequest, - ClusterRequestConverters::clusterPutSettings, - options, - ClusterUpdateSettingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously updates cluster wide specific settings using the Cluster Update Settings API. - * See Cluster Update Settings - * API on elastic.co - * @param clusterUpdateSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putSettingsAsync( - ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - clusterUpdateSettingsRequest, - ClusterRequestConverters::clusterPutSettings, - options, - ClusterUpdateSettingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get the cluster wide settings using the Cluster Get Settings API. - * See Cluster Get Settings - * API on elastic.co - * @param clusterGetSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - clusterGetSettingsRequest, - ClusterRequestConverters::clusterGetSettings, - options, - ClusterGetSettingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously get the cluster wide settings using the Cluster Get Settings API. - * See Cluster Get Settings - * API on elastic.co - * @param clusterGetSettingsRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getSettingsAsync( - ClusterGetSettingsRequest clusterGetSettingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - clusterGetSettingsRequest, - ClusterRequestConverters::clusterGetSettings, - options, - ClusterGetSettingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get cluster health using the Cluster Health API. - * See - * Cluster Health API on elastic.co - *
<p>
- * If timeout occurred, {@link ClusterHealthResponse} will have isTimedOut() == true and status() == RestStatus.REQUEST_TIMEOUT - * @param healthRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterHealthResponse health(ClusterHealthRequest healthRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - healthRequest, - ClusterRequestConverters::clusterHealth, - options, - ClusterHealthResponse::fromXContent, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Asynchronously get cluster health using the Cluster Health API. - * See - * Cluster Health API on elastic.co - * If timeout occurred, {@link ClusterHealthResponse} will have isTimedOut() == true and status() == RestStatus.REQUEST_TIMEOUT - * @param healthRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable healthAsync( - ClusterHealthRequest healthRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - healthRequest, - ClusterRequestConverters::clusterHealth, - options, - ClusterHealthResponse::fromXContent, - listener, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Get the remote cluster information using the Remote cluster info API. - * See Remote cluster info - * API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public RemoteInfoResponse remoteInfo(RemoteInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - ClusterRequestConverters::remoteInfo, - options, - RemoteInfoResponse::fromXContent, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Asynchronously get remote cluster information using the Remote cluster info API. - * See Remote cluster info - * API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable remoteInfoAsync(RemoteInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - ClusterRequestConverters::remoteInfo, - options, - RemoteInfoResponse::fromXContent, - listener, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Delete a component template using the Component Templates API - * - * @param req the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public AcknowledgedResponse deleteComponentTemplate(DeleteComponentTemplateRequest req, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - req, - ClusterRequestConverters::deleteComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously delete a component template using the Component Templates API - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteComponentTemplateAsync( - DeleteComponentTemplateRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - ClusterRequestConverters::deleteComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Puts a component template using the Component Templates API. - * - * @param putComponentTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public AcknowledgedResponse putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putComponentTemplateRequest, - ClusterRequestConverters::putComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously puts a component template using the Component Templates API. - * - * @param putComponentTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putComponentTemplateAsync( - PutComponentTemplateRequest putComponentTemplateRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putComponentTemplateRequest, - ClusterRequestConverters::putComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Gets component templates using the Components Templates API - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param getComponentTemplatesRequest the request - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public GetComponentTemplatesResponse getComponentTemplate( - GetComponentTemplatesRequest getComponentTemplatesRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getComponentTemplatesRequest, - ClusterRequestConverters::getComponentTemplates, - options, - GetComponentTemplatesResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously gets component templates using the Components Templates API - * @param getComponentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getComponentTemplateAsync( - GetComponentTemplatesRequest getComponentTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getComponentTemplatesRequest, - ClusterRequestConverters::getComponentTemplates, - options, - GetComponentTemplatesResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Uses the Component Templates API to determine if component templates exist - * - * @param componentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return true if any index templates in the request exist, false otherwise - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean existsComponentTemplate(ComponentTemplatesExistRequest componentTemplatesRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequest( - componentTemplatesRequest, - ClusterRequestConverters::componentTemplatesExist, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Uses the Index Templates API to determine if index templates exist - * @param componentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion. The listener will be called with the value {@code true} - * @return cancellable that may be used to cancel the request - */ - public Cancellable existsComponentTemplateAsync( - ComponentTemplatesExistRequest componentTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - - return restHighLevelClient.performRequestAsync( - componentTemplatesRequest, - ClusterRequestConverters::componentTemplatesExist, - options, - RestHighLevelClient::convertExistsResponse, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java deleted file mode 100644 index 3a63162960a7c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.feature.GetFeaturesRequest; -import org.elasticsearch.client.feature.GetFeaturesResponse; -import org.elasticsearch.client.feature.ResetFeaturesRequest; -import org.elasticsearch.client.feature.ResetFeaturesResponse; - -import java.io.IOException; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Snapshot API. - *
<p>
- * See Snapshot API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public class FeaturesClient { - private final RestHighLevelClient restHighLevelClient; - - FeaturesClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Get a list of features which can be included in a snapshot as feature states. - * See Get Snapshottable - * Features API on elastic.co - * - * @param getFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public GetFeaturesResponse getFeatures(GetFeaturesRequest getFeaturesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getFeaturesRequest, - FeaturesRequestConverters::getFeatures, - options, - GetFeaturesResponse::parse, - emptySet() - ); - } - - /** - * Asynchronously get a list of features which can be included in a snapshot as feature states. - * See Get Snapshottable - * Features API on elastic.co - * - * @param getFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getFeaturesAsync( - GetFeaturesRequest getFeaturesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getFeaturesRequest, - FeaturesRequestConverters::getFeatures, - options, - GetFeaturesResponse::parse, - listener, - emptySet() - ); - } - - /** - * Reset the state of Elasticsearch features, deleting system indices and performing other - * cleanup operations. - * See Rest - * Features API on elastic.co - * - * @param resetFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ResetFeaturesResponse resetFeatures(ResetFeaturesRequest resetFeaturesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - resetFeaturesRequest, - FeaturesRequestConverters::resetFeatures, - options, - ResetFeaturesResponse::parse, - emptySet() - ); - } - - /** - * Asynchronously reset the state of Elasticsearch features, deleting system indices and performing other - * cleanup operations. - * See Get Snapshottable - * Features API on elastic.co - * - * @param resetFeaturesRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable resetFeaturesAsync( - ResetFeaturesRequest resetFeaturesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - resetFeaturesRequest, - FeaturesRequestConverters::resetFeatures, - options, - ResetFeaturesResponse::parse, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java deleted file mode 100644 index bb2b8be43cf3b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.client.feature.GetFeaturesRequest; -import org.elasticsearch.client.feature.ResetFeaturesRequest; - -public class FeaturesRequestConverters { - - private FeaturesRequestConverters() {} - - static Request getFeatures(GetFeaturesRequest getFeaturesRequest) { - String endpoint = "/_features"; - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getFeaturesRequest.masterNodeTimeout()); - request.addParameters(parameters.asMap()); - return request; - } - - static Request resetFeatures(ResetFeaturesRequest resetFeaturesRequest) { - String endpoint = "/_features/_reset"; - return new Request(HttpPost.METHOD_NAME, endpoint); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java deleted file mode 100644 index 7c036510d0790..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ /dev/null @@ -1,907 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client; - -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.client.RequestConverters.EndpointBuilder; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import 
org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; - -import java.io.IOException; - -import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; -import static org.elasticsearch.client.RequestConverters.createContentType; -import static org.elasticsearch.client.RequestConverters.createEntity; - -final class MLRequestConverters { - - private MLRequestConverters() {} - - static Request putJob(PutJobRequest putJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(putJobRequest.getJob().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getJob(GetJobRequest getJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobRequest.getAllowNoMatch() != null) { - params.putParam(GetJobRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getJobRequest.getAllowNoMatch())); - } - if (getJobRequest.getExcludeGenerated() != null) { - params.putParam(GetJobRequest.EXCLUDE_GENERATED, Boolean.toString(getJobRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getJobStats(GetJobStatsRequest getJobStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getJobStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request openJob(OpenJobRequest openJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(openJobRequest.getJobId()) - .addPathPartAsIs("_open") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request closeJob(CloseJobRequest closeJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(closeJobRequest.getJobIds())) - 
.addPathPartAsIs("_close") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteExpiredData(DeleteExpiredDataRequest deleteExpiredDataRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("_delete_expired_data") - .addPathPart(deleteExpiredDataRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - request.setEntity(createEntity(deleteExpiredDataRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteJob(DeleteJobRequest deleteJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteJobRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteJobRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteJobRequest.getForce())); - } - if (deleteJobRequest.getWaitForCompletion() != null) { - params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request flushJob(FlushJobRequest flushJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(flushJobRequest.getJobId()) - .addPathPartAsIs("_flush") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request forecastJob(ForecastJobRequest forecastJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(forecastJobRequest.getJobId()) - .addPathPartAsIs("_forecast") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateJobRequest.getJobUpdate().getJobId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateJobRequest.getJobUpdate(), REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDatafeed(PutDatafeedRequest putDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(putDatafeedRequest.getDatafeed().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDatafeed(UpdateDatafeedRequest updateDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(updateDatafeedRequest.getDatafeedUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - 
request.setEntity(createEntity(updateDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedRequest.getDatafeedIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedRequest.getAllowNoMatch() != null) { - params.putParam(GetDatafeedRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getDatafeedRequest.getAllowNoMatch())); - } - if (getDatafeedRequest.getExcludeGenerated() != null) { - params.putParam(GetDatafeedRequest.EXCLUDE_GENERATED, Boolean.toString(getDatafeedRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(deleteDatafeedRequest.getDatafeedId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteDatafeedRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDatafeed(StartDatafeedRequest startDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(startDatafeedRequest.getDatafeedId()) - .addPathPartAsIs("_start") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(startDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request stopDatafeed(StopDatafeedRequest stopDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(stopDatafeedRequest.getDatafeedIds())) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(stopDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getDatafeedStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) throws IOException { - EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ml").addPathPartAsIs("datafeeds"); - String endpoint = previewDatafeedRequest.getDatafeedId() != null - ? 
builder.addPathPart(previewDatafeedRequest.getDatafeedId()).addPathPartAsIs("_preview").build() - : builder.addPathPartAsIs("_preview").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - if (previewDatafeedRequest.getDatafeedId() == null) { - request.setEntity(createEntity(previewDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteForecastRequest.getJobId()) - .addPathPartAsIs("_forecast") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds())) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteForecastRequest.getAllowNoForecasts() != null) { - params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts())); - } - if (deleteForecastRequest.timeout() != null) { - params.putParam("timeout", deleteForecastRequest.timeout().getStringRep()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteModelSnapshot(DeleteModelSnapshotRequest deleteModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(deleteModelSnapshotRequest.getSnapshotId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getBucketsRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCategories(GetCategoriesRequest getCategoriesRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getCategoriesRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("categories") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCategoriesRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getModelSnapshots(GetModelSnapshotsRequest getModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateModelSnapshot(UpdateModelSnapshotRequest updateModelSnapshotRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(updateModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_update") - .build(); - 
Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateModelSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request upgradeJobSnapshot(UpgradeJobModelSnapshotRequest upgradeJobModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(upgradeJobModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(upgradeJobModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_upgrade") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (upgradeJobModelSnapshotRequest.getTimeout() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.TIMEOUT.getPreferredName(), - upgradeJobModelSnapshotRequest.getTimeout().getStringRep() - ); - } - if (upgradeJobModelSnapshotRequest.getWaitForCompletion() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.WAIT_FOR_COMPLETION.getPreferredName(), - upgradeJobModelSnapshotRequest.getWaitForCompletion().toString() - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request revertModelSnapshot(RevertModelSnapshotRequest revertModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(revertModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(revertModelSnapshotsRequest.getSnapshotId()) - .addPathPart("_revert") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(revertModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds())) - .addPathPartAsIs("results") - .addPathPartAsIs("overall_buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getOverallBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getRecordsRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("records") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getRecordsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postData(PostDataRequest postDataRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(postDataRequest.getJobId()) - .addPathPartAsIs("_data") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (postDataRequest.getResetStart() != null) { - params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart()); - } - if (postDataRequest.getResetEnd() != null) { - params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd()); - } - 
BytesReference content = postDataRequest.getContent(); - request.addParameters(params.asMap()); - if (content != null) { - BytesRef source = postDataRequest.getContent().toBytesRef(); - HttpEntity byteEntity = new NByteArrayEntity( - source.bytes, - source.offset, - source.length, - createContentType(postDataRequest.getXContentType()) - ); - request.setEntity(byteEntity); - } - return request; - } - - static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getInfluencersRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("influencers") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getInfluencersRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendar(PutCalendarRequest putCalendarRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(putCalendarRequest.getCalendar().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarsRequest.getCalendarId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendarJob(PutCalendarJobRequest putCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(putCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(putCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpPut.METHOD_NAME, endpoint); - } - - static Request deleteCalendarJob(DeleteCalendarJobRequest deleteCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request deleteCalendar(DeleteCalendarRequest deleteCalendarRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarRequest.getCalendarId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request getCalendarEvents(GetCalendarEventsRequest getCalendarEventsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarEventsRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarEventsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postCalendarEvents(PostCalendarEventRequest postCalendarEventRequest) throws IOException { - String endpoint = new 
EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(postCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity( - createEntity(postCalendarEventRequest, REQUEST_BODY_CONTENT_TYPE, PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS) - ); - return request; - } - - static Request deleteCalendarEvent(DeleteCalendarEventRequest deleteCalendarEventRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .addPathPart(deleteCalendarEventRequest.getEventId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request estimateModelMemory(EstimateModelMemoryRequest estimateModelMemoryRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPartAsIs("_estimate_model_memory") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(estimateModelMemoryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDataFrameAnalytics(PutDataFrameAnalyticsRequest putRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(putRequest.getConfig().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest updateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(updateRequest.getUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDataFrameAnalytics(GetDataFrameAnalyticsRequest getRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getRequest.getPageParams() != null) { - PageParams pageParams = getRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getRequest.getAllowNoMatch() != null) { - params.putParam(GetDataFrameAnalyticsRequest.ALLOW_NO_MATCH, Boolean.toString(getRequest.getAllowNoMatch())); - } - if (getRequest.getExcludeGenerated() != null) { - params.putParam(GetDataFrameAnalyticsRequest.EXCLUDE_GENERATED, Boolean.toString(getRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest getStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - 
.addPathPart(Strings.collectionToCommaDelimitedString(getStatsRequest.getIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getStatsRequest.getPageParams() != null) { - PageParams pageParams = getStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getStatsRequest.getAllowNoMatch() != null) { - params.putParam( - GetDataFrameAnalyticsStatsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(getStatsRequest.getAllowNoMatch()) - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDataFrameAnalytics(StartDataFrameAnalyticsRequest startRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(startRequest.getId()) - .addPathPartAsIs("_start") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (startRequest.getTimeout() != null) { - params.withTimeout(startRequest.getTimeout()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request stopDataFrameAnalytics(StopDataFrameAnalyticsRequest stopRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(stopRequest.getId()) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (stopRequest.getTimeout() != null) { - params.withTimeout(stopRequest.getTimeout()); - } - if (stopRequest.getAllowNoMatch() != null) { - params.putParam( - StopDataFrameAnalyticsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(stopRequest.getAllowNoMatch()) - ); - } - if (stopRequest.getForce() != null) { - params.putParam(StopDataFrameAnalyticsRequest.FORCE.getPreferredName(), Boolean.toString(stopRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(deleteRequest.getId()) - .build(); - - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteRequest.getForce())); - } - if (deleteRequest.getTimeout() != null) { - params.withTimeout(deleteRequest.getTimeout()); - } - request.addParameters(params.asMap()); - - return request; - } - - static Request evaluateDataFrame(EvaluateDataFrameRequest evaluateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "_evaluate").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(evaluateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest explainRequest) throws IOException { - EndpointBuilder endpoint = new EndpointBuilder().addPathPartAsIs("_ml", 
"data_frame", "analytics"); - if (explainRequest.getId() != null) { - endpoint.addPathPart(explainRequest.getId()); - } - endpoint.addPathPartAsIs("_explain"); - - Request request = new Request(HttpPost.METHOD_NAME, endpoint.build()); - if (explainRequest.getConfig() != null) { - request.setEntity(createEntity(explainRequest.getConfig(), REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request getTrainedModels(GetTrainedModelsRequest getTrainedModelsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIds())) - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsRequest.getAllowNoMatch())); - } - if (getTrainedModelsRequest.getDecompressDefinition() != null) { - params.putParam( - GetTrainedModelsRequest.DECOMPRESS_DEFINITION, - Boolean.toString(getTrainedModelsRequest.getDecompressDefinition()) - ); - } - if (getTrainedModelsRequest.getIncludes().isEmpty() == false) { - params.putParam( - GetTrainedModelsRequest.INCLUDE, - Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIncludes()) - ); - } - if (getTrainedModelsRequest.getTags() != null) { - params.putParam(GetTrainedModelsRequest.TAGS, Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getTags())); - } - if (getTrainedModelsRequest.getExcludeGenerated() != null) { - params.putParam(GetTrainedModelsRequest.EXCLUDE_GENERATED, Boolean.toString(getTrainedModelsRequest.getExcludeGenerated())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request getTrainedModelsStats(GetTrainedModelsStatsRequest getTrainedModelsStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsStatsRequest.getIds())) - .addPathPart("_stats") - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsStatsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsStatsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsStatsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsStatsRequest.getAllowNoMatch())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModel(DeleteTrainedModelRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models").addPathPart(deleteRequest.getId()).build(); - 
return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putTrainedModel(PutTrainedModelRequest putTrainedModelRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelRequest.getTrainedModelConfig().getModelId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putTrainedModelRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putTrainedModelAlias(PutTrainedModelAliasRequest putTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(putTrainedModelAliasRequest.getModelAlias()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (putTrainedModelAliasRequest.getReassign() != null) { - params.putParam(PutTrainedModelAliasRequest.REASSIGN, Boolean.toString(putTrainedModelAliasRequest.getReassign())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModelAlias(DeleteTrainedModelAliasRequest deleteTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(deleteTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(deleteTrainedModelAliasRequest.getModelAlias()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putFilter(PutFilterRequest putFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(putFilterRequest.getMlFilter().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getFilter(GetFiltersRequest getFiltersRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(getFiltersRequest.getFilterId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getFiltersRequest.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString()); - } - if (getFiltersRequest.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request updateFilter(UpdateFilterRequest updateFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(updateFilterRequest.getFilterId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteFilter(DeleteFilterRequest deleteFilterRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "filters").addPathPart(deleteFilterRequest.getId()).build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - 
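Each converter above reduces to the same recipe: pick the HTTP verb, assemble the URL from path parts (the real code routes these through EndpointBuilder so caller-supplied ids are URL-escaped), then attach optional query parameters and, for body APIs, a serialized entity. A minimal sketch of that shape against the low-level client's Request class; the getFilterById helper is hypothetical, though its path and paging parameters mirror the getFilter converter above.

```java
import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.client.Request;

import java.util.HashMap;
import java.util.Map;

// Hypothetical converter showing the shared shape: verb + path + optional
// query parameters. The real converters build the path via EndpointBuilder,
// which URL-escapes caller-supplied ids such as filterId.
final class ExampleConverter {
    static Request getFilterById(String filterId, Integer from, Integer size) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_ml/filters/" + filterId);
        Map<String, String> params = new HashMap<>();
        if (from != null) {
            params.put("from", from.toString()); // paging parameters are optional
        }
        if (size != null) {
            params.put("size", size.toString());
        }
        request.addParameters(params);
        return request;
    }
}
```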
static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled())); - if (setUpgradeModeRequest.getTimeout() != null) { - params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request mlInfo(MlInfoRequest infoRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "info").build(); - return new Request(HttpGet.METHOD_NAME, endpoint); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java deleted file mode 100644 index 1272ca1658578..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ /dev/null @@ -1,2992 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.CloseJobResponse; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataResponse; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryResponse; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameResponse; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.FlushJobResponse; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.ForecastJobResponse; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetBucketsResponse; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import 
org.elasticsearch.client.ml.GetCalendarEventsResponse; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCalendarsResponse; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import org.elasticsearch.client.ml.GetCategoriesResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedResponse; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsResponse; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetFiltersResponse; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetInfluencersResponse; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobResponse; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetJobStatsResponse; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsResponse; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsResponse; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetRecordsResponse; -import org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsResponse; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.MlInfoResponse; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.OpenJobResponse; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostCalendarEventResponse; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PostDataResponse; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PreviewDatafeedResponse; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutCalendarResponse; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutDatafeedResponse; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutFilterResponse; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutJobResponse; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.PutTrainedModelResponse; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotResponse; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import 
org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StartDatafeedResponse; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.StopDatafeedResponse; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotResponse; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotResponse; -import org.elasticsearch.client.ml.job.stats.JobStats; - -import java.io.IOException; -import java.util.Collections; - -/** - * Machine Learning API client wrapper for the {@link RestHighLevelClient} - *
- * See the - * X-Pack Machine Learning APIs for additional information. - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class MachineLearningClient { - - private final RestHighLevelClient restHighLevelClient; - - MachineLearningClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Creates a new Machine Learning Job - *
- * For additional info - * see ML PUT job documentation - * - * @param request The PutJobRequest containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutJobResponse with enclosed {@link org.elasticsearch.client.ml.job.config.Job} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Job asynchronously and notifies listener on completion - *
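For context on how callers drove the putJob/putJobAsync pair above, a minimal sketch that builds a job configuration and creates it synchronously. The host, the total-requests job id, the field names, and the 10-minute bucket span are all illustrative, and the TimeValue import location assumes a recent 7.x client.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.core.TimeValue;

import java.util.Collections;

public class PutJobExample {
    public static void main(String[] args) throws Exception {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // One detector: sum the "total" field within each bucket.
            Detector detector = new Detector.Builder().setFunction("sum").setFieldName("total").build();
            AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector))
                .setBucketSpan(TimeValue.timeValueMinutes(10));
            DataDescription.Builder dataDescription = new DataDescription.Builder().setTimeField("timestamp");
            Job job = new Job.Builder("total-requests")
                .setAnalysisConfig(analysisConfig)
                .setDataDescription(dataDescription)
                .build();
            PutJobResponse response = client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            System.out.println("created job " + response.getResponse().getId());
        }
    }
}
```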
- * For additional info - * see ML PUT job documentation - * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putJobAsync(PutJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info. - *
- * For additional info - * see ML GET job documentation - * - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobResponse} response object containing - * the {@link org.elasticsearch.client.ml.job.config.Job} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info, asynchronously. - *
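The async variants above all share the Cancellable/ActionListener contract. A small sketch, assuming client is an open RestHighLevelClient and total-requests is the job created in the earlier sketch:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Cancellable;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;

final class GetJobExample {
    static Cancellable getJobAsync(RestHighLevelClient client) {
        GetJobRequest request = new GetJobRequest("total-requests"); // illustrative job id
        ActionListener<GetJobResponse> listener = ActionListener.wrap(
            response -> System.out.println("found " + response.count() + " job(s)"),
            Throwable::printStackTrace
        );
        // The returned Cancellable lets the caller abort the in-flight request.
        return client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener);
    }
}
```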
- * For additional info - * see ML GET job documentation - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobAsync(GetJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs - *
- * For additional info - * see Get job stats docs - * - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobStatsResponse} response object containing - * the {@link JobStats} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs, asynchronously. - *
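A sketch of reading the returned stats, under the same assumptions; the total-* wildcard expression is illustrative:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;

import java.io.IOException;

final class JobStatsExample {
    // Prints the state of every job matching the wildcard expression.
    static void printJobStates(RestHighLevelClient client) throws IOException {
        GetJobStatsRequest request = new GetJobStatsRequest("total-*");
        GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
        for (JobStats stats : response.jobStats()) {
            System.out.println(stats.getJobId() + " is " + stats.getState());
        }
    }
}
```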
- * For additional info - * see Get job stats docs - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs - *
- * For additional info - * see ML Delete Expired Data - * documentation - * - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteExpiredDataResponse deleteExpiredData(DeleteExpiredDataRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs asynchronously and notifies the listener on completion - *
- * For additional info - * see ML Delete Expired Data - * documentation - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteExpiredDataAsync( - DeleteExpiredDataRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job - *
- * For additional info - * see ML Delete job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion - *
- * For additional info - * see ML Delete Job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
- * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing if the job was successfully opened or not. - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job asynchronously, notifies listener on completion. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
- * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle. - * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - *
- * For additional info - * see ML Close Job documentation - * - * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing if the job was successfully closed or not. - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::closeJob, - options, - CloseJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Closes one or more Machine Learning Jobs asynchronously, notifies listener on completion - * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - *
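Putting the open/close javadoc above into practice, a sketch of the job lifecycle, again assuming an open client and the illustrative total-requests job:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;

import java.io.IOException;

final class JobLifecycleExample {
    static void openAndClose(RestHighLevelClient client) throws IOException {
        // Opening loads the most recent model state (or starts empty for a new job).
        OpenJobResponse opened = client.machineLearning()
            .openJob(new OpenJobRequest("total-requests"), RequestOptions.DEFAULT);
        System.out.println("opened: " + opened.isOpened());
        // Closing persists the model state; results stay searchable afterwards.
        CloseJobResponse closed = client.machineLearning()
            .closeJob(new CloseJobRequest("total-requests"), RequestOptions.DEFAULT);
        System.out.println("closed: " + closed.isClosed());
    }
}
```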
- * For additional info - * see ML Close Job documentation - * - * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable closeJobAsync(CloseJobRequest request, RequestOptions options, ActionListener<CloseJobResponse> listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::closeJob, - options, - CloseJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Flushes internally buffered data for the given Machine Learning Job, ensuring all data sent to the job has been processed. - * This may cause new results to be calculated depending on the contents of the buffer. - * Both flush and close operations are similar, - * however the flush is more efficient if you are expecting to send more data for analysis. - * When flushing, the job remains open and is available to continue analyzing data. - * A close operation additionally prunes and persists the model state to disk and the - * job must be opened again before analyzing further data. - *
- * For additional info - * see Flush ML job documentation - * - * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::flushJob, - options, - FlushJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Flushes internally buffered data for the given Machine Learning Job asynchronously, ensuring all data sent to the job has been processed. - * This may cause new results to be calculated depending on the contents of the buffer. - * Both flush and close operations are similar, - * however the flush is more efficient if you are expecting to send more data for analysis. - * When flushing, the job remains open and is available to continue analyzing data. - * A close operation additionally prunes and persists the model state to disk and the - * job must be opened again before analyzing further data. - *
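A flush sketch under the same assumptions; setCalcInterim asks the server to also compute interim results for buckets that have not yet closed:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;

import java.io.IOException;

final class FlushJobExample {
    static void flush(RestHighLevelClient client) throws IOException {
        FlushJobRequest request = new FlushJobRequest("total-requests");
        request.setCalcInterim(true); // also calculate interim results for open buckets
        FlushJobResponse response = client.machineLearning().flushJob(request, RequestOptions.DEFAULT);
        // Unlike close(), the job stays open; suitable when more data is coming.
        System.out.println("flushed: " + response.isFlushed());
    }
}
```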
- * For additional info - * see Flush ML job documentation - * - * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::flushJob, - options, - FlushJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a forecast of an existing, opened Machine Learning Job - * This predicts the future behavior of a time series by using its historical behavior. - *
- * For additional info - * see Forecast ML Job Documentation - * - * @param request ForecastJobRequest with forecasting options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing forecast acknowledgement and new forecast's ID - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::forecastJob, - options, - ForecastJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a forecast of an existing, opened Machine Learning Job asynchronously - * This predicts the future behavior of a time series by using its historical behavior. - *
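A forecasting sketch under the same assumptions; the 24-hour duration is illustrative, and the returned forecast id is what deleteForecast later consumes:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.core.TimeValue;

import java.io.IOException;

final class ForecastExample {
    static String forecastNextDay(RestHighLevelClient client) throws IOException {
        ForecastJobRequest request = new ForecastJobRequest("total-requests");
        request.setDuration(TimeValue.timeValueHours(24)); // predict 24h past the latest record
        ForecastJobResponse response = client.machineLearning().forecastJob(request, RequestOptions.DEFAULT);
        return response.getForecastId(); // needed later to fetch or delete the forecast
    }
}
```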
- * For additional info - * see Forecast ML Job Documentation - * - * @param request ForecastJobRequest with forecasting options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::forecastJob, - options, - ForecastJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Job Forecasts - *
- * For additional info - * see Delete Job Forecast - * Documentation - * - * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return a AcknowledgedResponse object indicating request success - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteForecast(DeleteForecastRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteForecast, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Job Forecasts asynchronously - *
- * For additional info - * see Delete Job Forecast - * Documentation - * - * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteForecastAsync( - DeleteForecastRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteForecast, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Model Snapshots - *
- * For additional info - * see - * ML Delete Model Snapshot documentation - * - * @param request The request to delete the model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteModelSnapshot(DeleteModelSnapshotRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteModelSnapshot, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Model Snapshots asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Delete Model Snapshot documentation - * - * @param request The request to delete the model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteModelSnapshotAsync( - DeleteModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteModelSnapshot, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Reverts to a particular Machine Learning Model Snapshot - *
- * For additional info - * see - * ML Revert Model Snapshot documentation - * - * @param request The request to revert to a previous model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public RevertModelSnapshotResponse revertModelSnapshot(RevertModelSnapshotRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::revertModelSnapshot, - options, - RevertModelSnapshotResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Reverts to a particular Machine Learning Model Snapshot asynchronously and notifies the listener on completion - *
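A hedged sketch of reverting, assuming a snapshot id previously obtained from getModelSnapshots; setDeleteInterveningResults is the client-side setter for discarding results newer than the snapshot:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;

import java.io.IOException;

final class RevertSnapshotExample {
    static void revert(RestHighLevelClient client, String snapshotId) throws IOException {
        RevertModelSnapshotRequest request = new RevertModelSnapshotRequest("total-requests", snapshotId);
        request.setDeleteInterveningResults(true); // drop results newer than the snapshot
        RevertModelSnapshotResponse response = client.machineLearning()
            .revertModelSnapshot(request, RequestOptions.DEFAULT);
        System.out.println("reverted to snapshot " + response.getModel().getSnapshotId());
    }
}
```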
- * For additional info - * see - * ML Revert Model Snapshot documentation - * - * @param request The request to revert to a previous model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable revertModelSnapshotAsync( - RevertModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::revertModelSnapshot, - options, - RevertModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Datafeed - *
- * For additional info - * see ML PUT datafeed documentation - * - * @param request The PutDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutDatafeedResponse with enclosed {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDatafeedResponse putDatafeed(PutDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putDatafeed, - options, - PutDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Datafeed asynchronously and notifies listener on completion - *
- * For additional info - * see ML PUT datafeed documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putDatafeedAsync(PutDatafeedRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putDatafeed, - options, - PutDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Datafeed - *
- * For additional info - * see - * ML Update datafeed documentation - * - * @param request The UpdateDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutDatafeedResponse with enclosed, updated {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDatafeedResponse updateDatafeed(UpdateDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateDatafeed, - options, - PutDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Datafeed asynchronously and notifies listener on completion - *
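A sketch of creating the datafeed that feeds the job from the earlier sketches; the datafeed-total-requests id and request-logs index are illustrative:

```java
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;

import java.io.IOException;

final class PutDatafeedExample {
    static void createDatafeed(RestHighLevelClient client) throws IOException {
        // Feed documents from the "request-logs" index into the "total-requests" job.
        DatafeedConfig config = new DatafeedConfig.Builder("datafeed-total-requests", "total-requests")
            .setIndices("request-logs")
            .build();
        PutDatafeedResponse response = client.machineLearning()
            .putDatafeed(new PutDatafeedRequest(config), RequestOptions.DEFAULT);
        System.out.println("created datafeed " + response.getResponse().getId());
    }
}
```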
- * For additional info - * see - * ML Update datafeed documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateDatafeedAsync( - UpdateDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateDatafeed, - options, - PutDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning datafeed configuration info. - * - *
- * For additional info - * see ML GET datafeed documentation - * - * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDatafeedResponse} response object containing - * the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetDatafeedResponse getDatafeed(GetDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDatafeed, - options, - GetDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning datafeed configuration info, asynchronously. - * - *
- * For additional info - * see ML GET datafeed documentation - * - * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetDatafeedResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDatafeedAsync(GetDatafeedRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDatafeed, - options, - GetDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Delete Datafeed documentation - * - * @param request The request to delete the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteDatafeed(DeleteDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteDatafeed, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Delete Datafeed documentation - * - * @param request The request to delete the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteDatafeedAsync( - DeleteDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteDatafeed, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Starts the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Start Datafeed documentation - * - * @param request The request to start the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StartDatafeedResponse startDatafeed(StartDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::startDatafeed, - options, - StartDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Start Datafeed documentation - * - * @param request The request to start the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable startDatafeedAsync( - StartDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::startDatafeed, - options, - StartDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Stops the given Machine Learning Datafeed - *

- * For additional info - * see - * ML Stop Datafeed documentation - * - * @param request The request to stop the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StopDatafeedResponse stopDatafeed(StopDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::stopDatafeed, - options, - StopDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Stops the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
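A corresponding sketch for `stopDatafeed`; the ids and timeout are hypothetical, and the `org.elasticsearch.core.TimeValue` import path is assumed from contemporary branches:

```java
// Minimal sketch of the removed stopDatafeed API; ids and timeout are hypothetical.
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.core.TimeValue; // assumed import path for this era of the codebase

class StopDatafeedSketch {
    static boolean stopDatafeed(RestHighLevelClient client) throws IOException {
        StopDatafeedRequest request = new StopDatafeedRequest("my-datafeed"); // accepts one or more datafeed ids
        request.setTimeout(TimeValue.timeValueSeconds(30)); // how long to wait for the stop to complete
        StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
        return response.isStopped();
    }
}
```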

- * For additional info - * see - * ML Stop Datafeed documentation - * - * @param request The request to stop the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable stopDatafeedAsync( - StopDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::stopDatafeed, - options, - StopDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets statistics for one or more Machine Learning datafeeds - *

- * For additional info - * see Get datafeed stats docs - * - * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDatafeedStatsResponse} response object containing - * the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDatafeedStats, - options, - GetDatafeedStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Previews the given Machine Learning Datafeed - *

- * For additional info - * see - * ML Preview Datafeed documentation - * - * @param request The request to preview the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in - * JSON format - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::previewDatafeed, - options, - PreviewDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets statistics for one or more Machine Learning datafeeds, asynchronously. - *

- * For additional info - * see Get datafeed stats docs - * - * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDatafeedStatsAsync( - GetDatafeedStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDatafeedStats, - options, - GetDatafeedStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *

- * For additional info - * see - * ML Preview Datafeed documentation - * - * @param request The request to preview the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable previewDatafeedAsync( - PreviewDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::previewDatafeed, - options, - PreviewDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} - *

- * For additional info - * see ML Update Job Documentation - * - * @param request the {@link UpdateJobRequest} object enclosing the desired updates - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return a PutJobResponse object containing the updated job object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateJob, - options, - PutJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously - *

- * For additional info - * see ML Update Job Documentation - * - * @param request the {@link UpdateJobRequest} object enclosing the desired updates - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateJob, - options, - PutJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the buckets for a Machine Learning Job. - *

-     * For additional info
-     * see ML GET buckets documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetBucketsResponse} response object containing the requested buckets
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getBuckets,
-            options,
-            GetBucketsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
-     *
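`getBuckets` supported result filtering and paging through the request object; a minimal sketch with a hypothetical job id and thresholds:

```java
// Minimal sketch of the removed getBuckets API; job id and thresholds are hypothetical.
import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.client.ml.job.util.PageParams;

class GetBucketsSketch {
    static List<Bucket> anomalousBuckets(RestHighLevelClient client) throws IOException {
        GetBucketsRequest request = new GetBucketsRequest("my-job");
        request.setAnomalyScore(75.0);                 // only buckets scoring at least 75
        request.setPageParams(new PageParams(0, 100)); // first 100 matching buckets
        GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
        return response.buckets();
    }
}
```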

- * For additional info - * see ML GET buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getBuckets, - options, - GetBucketsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the categories for a Machine Learning Job. - *
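All of the `*Async` variants share one shape: they take an `ActionListener` for the parsed response and return a `Cancellable` handle. A sketch using `getBucketsAsync`, with hypothetical ids:

```java
// Common async pattern for the removed *Async methods; "client" and the
// job id are hypothetical.
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Cancellable;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;

class GetBucketsAsyncSketch {
    static Cancellable getBucketsAsync(RestHighLevelClient client) {
        ActionListener<GetBucketsResponse> listener = new ActionListener<GetBucketsResponse>() {
            @Override
            public void onResponse(GetBucketsResponse response) {
                // inspect response.buckets()
            }

            @Override
            public void onFailure(Exception e) {
                // handle transport or parsing failures
            }
        };
        // keep the returned Cancellable if the caller may need to abort the request
        return client.machineLearning().getBucketsAsync(new GetBucketsRequest("my-job"), RequestOptions.DEFAULT, listener);
    }
}
```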

-     * For additional info
-     * see
-     * ML GET categories documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetCategoriesResponse getCategories(GetCategoriesRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCategories,
-            options,
-            GetCategoriesResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the categories for a Machine Learning Job, notifies listener once the requested categories are retrieved.
-     *

- * For additional info - * see - * ML GET categories documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getCategoriesAsync( - GetCategoriesRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getCategories, - options, - GetCategoriesResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the snapshots for a Machine Learning Job. - *

- * For additional info - * see - * ML GET model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetModelSnapshotsResponse getModelSnapshots(GetModelSnapshotsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getModelSnapshots, - options, - GetModelSnapshotsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the snapshots for a Machine Learning Job, notifies listener once the requested snapshots are retrieved. - *

- * For additional info - * see - * ML GET model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getModelSnapshotsAsync( - GetModelSnapshotsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getModelSnapshots, - options, - GetModelSnapshotsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a snapshot for a Machine Learning Job. - *

-     * For additional info
-     * see
-     * ML UPDATE model snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public UpdateModelSnapshotResponse updateModelSnapshot(UpdateModelSnapshotRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateModelSnapshot,
-            options,
-            UpdateModelSnapshotResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a snapshot for a Machine Learning Job, notifies listener once the snapshot has been updated.
-     *

- * For additional info - * see - * ML UPDATE model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateModelSnapshotAsync( - UpdateModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateModelSnapshot, - options, - UpdateModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Upgrades a snapshot for a Machine Learning Job to the current major version. - *

- * For additional info - * see - * ML Upgrade job snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public UpgradeJobModelSnapshotResponse upgradeJobSnapshot(UpgradeJobModelSnapshotRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::upgradeJobSnapshot, - options, - UpgradeJobModelSnapshotResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Upgrades a snapshot for a Machine Learning Job to the current major version, - * notifies listener once the upgrade has started. - *

- * For additional info - * see - * ML Upgrade job snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable upgradeJobSnapshotAsync( - UpgradeJobModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::upgradeJobSnapshot, - options, - UpgradeJobModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets overall buckets for a set of Machine Learning Jobs. - *

- * For additional info - * see - * ML GET overall buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getOverallBuckets, - options, - GetOverallBucketsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets overall buckets for a set of Machine Learning Jobs, notifies listener once the requested buckets are retrieved. - *

- * For additional info - * see - * ML GET overall buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getOverallBucketsAsync( - GetOverallBucketsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getOverallBuckets, - options, - GetOverallBucketsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the records for a Machine Learning Job. - *

- * For additional info - * see ML GET records documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getRecords, - options, - GetRecordsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the records for a Machine Learning Job, notifies listener once the requested records are retrieved. - *

- * For additional info - * see ML GET records documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getRecordsAsync(GetRecordsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getRecords, - options, - GetRecordsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Sends data to an anomaly detection job for analysis. - *

- * NOTE: The job must have a state of open to receive and process the data. - *

- * For additional info - * see ML POST Data documentation - * - * @param request PostDataRequest containing the data to post and some additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing operational progress about the job - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::postData, - options, - PostDataResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Sends data to an anomaly detection job for analysis, asynchronously - *
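`postData` wrapped one or more JSON documents in a `PostDataRequest.JsonBuilder`; a minimal sketch, with the job id and field name as hypothetical placeholders (per the note above, the job must already be open):

```java
// Minimal sketch of the removed postData API; job id and field name are hypothetical.
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;

class PostDataSketch {
    static long postOneDoc(RestHighLevelClient client) throws IOException {
        PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
        Map<String, Object> doc = new HashMap<>();
        doc.put("total", 109);
        jsonBuilder.addDoc(doc); // each added map becomes one JSON record
        PostDataRequest request = new PostDataRequest("my-job", jsonBuilder);
        PostDataResponse response = client.machineLearning().postData(request, RequestOptions.DEFAULT);
        return response.getDataCounts().getProcessedRecordCount();
    }
}
```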

- * NOTE: The job must have a state of open to receive and process the data. - *

- * For additional info - * see ML POST Data documentation - * - * @param request PostDataRequest containing the data to post and some additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable postDataAsync(PostDataRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::postData, - options, - PostDataResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple calendars. - *

-     * For additional info
-     * see ML GET calendars documentation
-     *
-     * @param request The calendars request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetCalendarsResponse} response object containing the {@link org.elasticsearch.client.ml.calendars.Calendar}
-     * objects and the number of calendars found
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetCalendarsResponse getCalendars(GetCalendarsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCalendars,
-            options,
-            GetCalendarsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets a single or multiple calendars, notifies listener once the requested calendars are retrieved.
-     *
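A minimal sketch of `getCalendars`, using a hypothetical calendar id; omitting `setCalendarId` would fetch all calendars:

```java
// Minimal sketch of the removed getCalendars API; the calendar id is hypothetical.
import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.calendars.Calendar;

class GetCalendarsSketch {
    static List<Calendar> holidays(RestHighLevelClient client) throws IOException {
        GetCalendarsRequest request = new GetCalendarsRequest();
        request.setCalendarId("holidays"); // omit to fetch all calendars
        GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
        return response.calendars();
    }
}
```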

- * For additional info - * see ML GET calendars documentation - * - * @param request The calendars request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getCalendarsAsync( - GetCalendarsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getCalendars, - options, - GetCalendarsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the influencers for a Machine Learning Job. - *

- * For additional info - * see - * ML GET influencers documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetInfluencersResponse getInfluencers(GetInfluencersRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getInfluencers, - options, - GetInfluencersResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the influencers for a Machine Learning Job, notifies listener once the requested influencers are retrieved. - *

-     * For additional info
-     * see
-     * ML GET influencers documentation
-     *
-     * @param request the request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getInfluencersAsync(
-        GetInfluencersRequest request,
-        RequestOptions options,
-        ActionListener<GetInfluencersResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getInfluencers,
-            options,
-            GetInfluencersResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Create a new machine learning calendar
-     *

- * For additional info - * see - * ML create calendar documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse putCalendar(PutCalendarRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putCalendar, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Create a new machine learning calendar, notifies listener with the created calendar - *
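`putCalendar` took a fully-built `Calendar` (id, attached job ids, description); a sketch with hypothetical values:

```java
// Minimal sketch of the removed putCalendar API; ids and description are hypothetical.
import java.io.IOException;
import java.util.Collections;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.calendars.Calendar;

class PutCalendarSketch {
    static Calendar createCalendar(RestHighLevelClient client) throws IOException {
        Calendar calendar = new Calendar("holidays", Collections.singletonList("my-job"), "company holidays");
        PutCalendarResponse response = client.machineLearning()
            .putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
        return response.getCalendar();
    }
}
```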

- * For additional info - * see - * ML create calendar documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putCalendarAsync(PutCalendarRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putCalendar, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Adds Machine Learning Job(s) to a calendar - *

- * For additional info - * see - * ML Put calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the updated calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse putCalendarJob(PutCalendarJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putCalendarJob, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Adds Machine Learning Job(s) to a calendar, notifies listener when completed - *

- * For additional info - * see - * ML Put calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putCalendarJobAsync( - PutCalendarJobRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putCalendarJob, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Removes Machine Learning Job(s) from a calendar - *

- * For additional info - * see - * ML Delete calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the updated calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse deleteCalendarJob(DeleteCalendarJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteCalendarJob, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Removes Machine Learning Job(s) from a calendar, notifies listener when completed - *

- * For additional info - * see - * ML Delete calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarJobAsync( - DeleteCalendarJobRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendarJob, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Calendar - *

-     * For additional info see
-     *
-     * ML Delete calendar documentation
-     *
-     * @param request The request to delete the calendar
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteCalendar(DeleteCalendarRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendar,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning Calendar asynchronously and notifies the listener on completion
-     *

- * For additional info see - * - * ML Delete calendar documentation - * - * @param request The request to delete the calendar - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarAsync( - DeleteCalendarRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendar, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the events for a machine learning calendar - *

-     * For additional info
-     * see
-     * GET Calendar Events API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link GetCalendarEventsResponse} containing the scheduled events
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetCalendarEventsResponse getCalendarEvents(GetCalendarEventsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCalendarEvents,
-            options,
-            GetCalendarEventsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the events for a machine learning calendar asynchronously, notifies the listener on completion
-     *

-     * For additional info
-     * see
-     * GET Calendar Events API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getCalendarEventsAsync(
-        GetCalendarEventsRequest request,
-        RequestOptions options,
-        ActionListener<GetCalendarEventsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getCalendarEvents,
-            options,
-            GetCalendarEventsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates new events for a machine learning calendar
-     *

-     * For additional info
-     * see
-     * Add Events to Calendar API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PostCalendarEventResponse} containing the scheduled events
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PostCalendarEventResponse postCalendarEvent(PostCalendarEventRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::postCalendarEvents,
-            options,
-            PostCalendarEventResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates new events for a machine learning calendar asynchronously, notifies the listener on completion
-     *

- * For additional info - * see - * Add Events to Calendar API - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable postCalendarEventAsync( - PostCalendarEventRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::postCalendarEvents, - options, - PostCalendarEventResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Removes a Scheduled Event from a calendar - *

-     * For additional info
-     * see
-     * ML Delete calendar event documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteCalendarEvent(DeleteCalendarEventRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendarEvent,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Removes a Scheduled Event from a calendar, notifies listener when completed
-     *

- * For additional info - * see - * ML Delete calendar event documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarEventAsync( - DeleteCalendarEventRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendarEvent, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Filter - *

- * For additional info - * see ML PUT Filter documentation - * - * @param request The PutFilterRequest containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutFilterResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutFilterResponse putFilter(PutFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putFilter, - options, - PutFilterResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Filter asynchronously and notifies listener on completion - *
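`putFilter` accepted an `MlFilter` assembled through its builder; a sketch with a hypothetical filter id and items:

```java
// Minimal sketch of the removed putFilter API; filter id and items are hypothetical.
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.job.config.MlFilter;

class PutFilterSketch {
    static MlFilter createFilter(RestHighLevelClient client) throws IOException {
        MlFilter filter = MlFilter.builder("safe_domains")
            .setDescription("domains that should never be flagged")
            .setItems("*.elastic.co", "wikipedia.org")
            .build();
        PutFilterResponse response = client.machineLearning().putFilter(new PutFilterRequest(filter), RequestOptions.DEFAULT);
        return response.getResponse();
    }
}
```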

- * For additional info - * see ML PUT Filter documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putFilterAsync(PutFilterRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putFilter, - options, - PutFilterResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning Filters - *

-     * For additional info
-     * see ML GET Filter documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return GetFiltersResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} objects
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetFiltersResponse getFilter(GetFiltersRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getFilter,
-            options,
-            GetFiltersResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets Machine Learning Filters asynchronously and notifies listener on completion
-     *

- * For additional info - * see ML GET Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getFilterAsync(GetFiltersRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getFilter, - options, - GetFiltersResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Filter - *

- * For additional info - * see - * ML Update Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutFilterResponse with the updated {@link org.elasticsearch.client.ml.job.config.MlFilter} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutFilterResponse updateFilter(UpdateFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateFilter, - options, - PutFilterResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Filter asynchronously and notifies listener on completion - *

- * For additional info - * see - * ML Update Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateFilterAsync(UpdateFilterRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateFilter, - options, - PutFilterResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning filter - *

- * For additional info - * see - * ML Delete Filter documentation - * - * @param request The request to delete the filter - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteFilter(DeleteFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteFilter, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning filter asynchronously and notifies the listener on completion - *

- * For additional info - * see - * ML Delete Filter documentation - * - * @param request The request to delete the filter - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteFilterAsync( - DeleteFilterRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteFilter, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning information about default values and limits. - *

- * For additional info - * see Machine Learning info - * - * @param request The request of Machine Learning info - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response info about default values and limits - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public MlInfoResponse getMlInfo(MlInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::mlInfo, - options, - MlInfoResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning information about default values and limits, asynchronously. - *
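`getMlInfo` needed no parameters and returned the defaults and limits as a nested map; a minimal sketch:

```java
// Minimal sketch of the removed getMlInfo API; "client" is hypothetical.
import java.io.IOException;
import java.util.Map;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.MlInfoRequest;
import org.elasticsearch.client.ml.MlInfoResponse;

class MlInfoSketch {
    static Map<String, Object> mlInfo(RestHighLevelClient client) throws IOException {
        MlInfoResponse response = client.machineLearning().getMlInfo(new MlInfoRequest(), RequestOptions.DEFAULT);
        return response.getInfo(); // defaults, limits, and upgrade_mode as a nested map
    }
}
```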

- * For additional info - * see Machine Learning info - * - * @param request The request of Machine Learning info - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getMlInfoAsync(MlInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::mlInfo, - options, - MlInfoResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Sets the ML cluster setting upgrade_mode - *

-     * For additional info
-     * see Set Upgrade Mode
-     *
-     * @param request The request to set upgrade mode
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse setUpgradeMode(SetUpgradeModeRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::setUpgradeMode,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Sets the ML cluster setting upgrade_mode asynchronously
-     *

-     * For additional info
-     * see Set Upgrade Mode
-     *
-     * @param request The request to set upgrade mode
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable setUpgradeModeAsync(
-        SetUpgradeModeRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::setUpgradeMode,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Estimate the model memory an analysis config is likely to need given supplied field cardinalities
-     *

- * For additional info - * see Estimate Model Memory - * - * @param request The {@link EstimateModelMemoryRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link EstimateModelMemoryResponse} response object - */ - public EstimateModelMemoryResponse estimateModelMemory(EstimateModelMemoryRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::estimateModelMemory, - options, - EstimateModelMemoryResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Estimate the model memory an analysis config is likely to need given supplied field cardinalities and notifies listener upon - * completion - *
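`estimateModelMemory` paired an `AnalysisConfig` with caller-supplied field cardinalities; a sketch in which the detector, bucket span, and cardinality value are hypothetical, and the `TimeValue` import path is assumed as above:

```java
// Minimal sketch of the removed estimateModelMemory API; detector and
// cardinality values are hypothetical.
import java.io.IOException;
import java.util.Collections;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.EstimateModelMemoryRequest;
import org.elasticsearch.client.ml.EstimateModelMemoryResponse;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.core.TimeValue; // assumed import path for this era of the codebase

class EstimateModelMemorySketch {
    static String estimate(RestHighLevelClient client) throws IOException {
        Detector detector = new Detector.Builder().setFunction("count").setByFieldName("user").build();
        AnalysisConfig analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector))
            .setBucketSpan(TimeValue.timeValueMinutes(15))
            .build();
        EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
        request.setOverallCardinality(Collections.singletonMap("user", 50_000L)); // observed cardinality of the by-field
        EstimateModelMemoryResponse response = client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT);
        return response.getModelMemoryEstimate().getStringRep(); // e.g. "45mb"
    }
}
```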

- * For additional info - * see Estimate Model Memory - * - * @param request The {@link EstimateModelMemoryRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable estimateModelMemoryAsync( - EstimateModelMemoryRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::estimateModelMemory, - options, - EstimateModelMemoryResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Data Frame Analytics config - *

- * For additional info - * see - * PUT Data Frame Analytics documentation - * - * @param request The {@link PutDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutDataFrameAnalyticsResponse} containing the created - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDataFrameAnalyticsResponse putDataFrameAnalytics(PutDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Data Frame Analytics config asynchronously and notifies listener upon completion - *
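`putDataFrameAnalytics` required a complete `DataFrameAnalyticsConfig` with source, dest, and analysis; a sketch using default outlier detection and hypothetical index names:

```java
// Minimal sketch of the removed putDataFrameAnalytics API; ids and index
// names are hypothetical, and the analysis uses library defaults.
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;

class PutDataFrameAnalyticsSketch {
    static PutDataFrameAnalyticsResponse createConfig(RestHighLevelClient client) throws IOException {
        DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
            .setId("my-outlier-job")
            .setSource(DataFrameAnalyticsSource.builder().setIndex("source-index").build())
            .setDest(DataFrameAnalyticsDest.builder().setIndex("dest-index").build())
            .setAnalysis(OutlierDetection.createDefault())
            .build();
        return client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(config), RequestOptions.DEFAULT);
    }
}
```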

- * For additional info - * see - * PUT Data Frame Analytics documentation - * - * @param request The {@link PutDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putDataFrameAnalyticsAsync( - PutDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Data Frame Analytics config - *

-     * For additional info
-     * see
-     * Update Data Frame Analytics documentation
-     *
-     * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutDataFrameAnalyticsResponse} containing the updated
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutDataFrameAnalyticsResponse updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateDataFrameAnalytics,
-            options,
-            PutDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Data Frame Analytics config asynchronously and notifies listener upon completion
-     *

- * For additional info - * see - * Update Data Frame Analytics documentation - * - * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateDataFrameAnalyticsAsync( - UpdateDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple Data Frame Analytics configs - *

- * For additional info - * see - * GET Data Frame Analytics documentation - * - * @param request The {@link GetDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDataFrameAnalyticsResponse} response object containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} objects - */ - public GetDataFrameAnalyticsResponse getDataFrameAnalytics(GetDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalytics, - options, - GetDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple Data Frame Analytics configs asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Data Frame Analytics documentation - * - * @param request The {@link GetDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDataFrameAnalyticsAsync( - GetDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalytics, - options, - GetDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the running statistics of a Data Frame Analytics - *

- * For additional info - * see - * GET Data Frame Analytics Stats documentation - * - * @param request The {@link GetDataFrameAnalyticsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDataFrameAnalyticsStatsResponse} response object - */ - public GetDataFrameAnalyticsStatsResponse getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalyticsStats, - options, - GetDataFrameAnalyticsStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the running statistics of a Data Frame Analytics asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Data Frame Analytics Stats documentation - * - * @param request The {@link GetDataFrameAnalyticsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDataFrameAnalyticsStatsAsync( - GetDataFrameAnalyticsStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalyticsStats, - options, - GetDataFrameAnalyticsStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Starts Data Frame Analytics - *

- * For additional info - * see - * Start Data Frame Analytics documentation - * - * @param request The {@link StartDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StartDataFrameAnalyticsResponse startDataFrameAnalytics(StartDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::startDataFrameAnalytics, - options, - StartDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Starts Data Frame Analytics asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Start Data Frame Analytics documentation - * - * @param request The {@link StartDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable startDataFrameAnalyticsAsync( - StartDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::startDataFrameAnalytics, - options, - StartDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Stops Data Frame Analytics - *

- * For additional info - * see - * Stop Data Frame Analytics documentation - * - * @param request The {@link StopDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link StopDataFrameAnalyticsResponse} - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StopDataFrameAnalyticsResponse stopDataFrameAnalytics(StopDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::stopDataFrameAnalytics, - options, - StopDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Stops Data Frame Analytics asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Stop Data Frame Analytics documentation - * - * @param request The {@link StopDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable stopDataFrameAnalyticsAsync( - StopDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::stopDataFrameAnalytics, - options, - StopDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Data Frame Analytics config - *

- * For additional info - * see - * DELETE Data Frame Analytics documentation - * - * @param request The {@link DeleteDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteDataFrameAnalytics, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Data Frame Analytics config asynchronously and notifies listener upon completion - *

- * For additional info - * see - * DELETE Data Frame Analytics documentation - * - * @param request The {@link DeleteDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteDataFrameAnalyticsAsync( - DeleteDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteDataFrameAnalytics, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Evaluates the given Data Frame - *

- * For additional info - * see - * Evaluate Data Frame documentation - * - * @param request The {@link EvaluateDataFrameRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link EvaluateDataFrameResponse} response object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public EvaluateDataFrameResponse evaluateDataFrame(EvaluateDataFrameRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::evaluateDataFrame, - options, - EvaluateDataFrameResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Evaluates the given Data Frame asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Evaluate Data Frame documentation - * - * @param request The {@link EvaluateDataFrameRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable evaluateDataFrameAsync( - EvaluateDataFrameRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::evaluateDataFrame, - options, - EvaluateDataFrameResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Explains the given Data Frame Analytics - *
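Each of these calls also has an Async variant that takes an ActionListener and returns a Cancellable, as the signatures above show. A sketch of that convention, assuming `request` is an already-built EvaluateDataFrameRequest:

Cancellable cancellable = client.machineLearning().evaluateDataFrameAsync(
    request,
    RequestOptions.DEFAULT,
    new ActionListener<EvaluateDataFrameResponse>() {
        @Override
        public void onResponse(EvaluateDataFrameResponse response) {
            // inspect the evaluation metrics here
        }

        @Override
        public void onFailure(Exception e) {
            // handle transport or parsing failures here
        }
    }
);
// The returned Cancellable can abort the request while it is still in flight:
// cancellable.cancel();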

- * For additional info - * see - * Explain Data Frame Analytics documentation - * - * @param request The {@link ExplainDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link ExplainDataFrameAnalyticsResponse} response object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public ExplainDataFrameAnalyticsResponse explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::explainDataFrameAnalytics, - options, - ExplainDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Explains the given Data Frame Analytics asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Explain Data Frame Analytics documentation - * - * @param request The {@link ExplainDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable explainDataFrameAnalyticsAsync( - ExplainDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::explainDataFrameAnalytics, - options, - ExplainDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets trained model configs - *

- * For additional info - * see - * GET Trained Model Configs documentation - * - * @param request The {@link GetTrainedModelsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetTrainedModelsResponse} response object - */ - public GetTrainedModelsResponse getTrainedModels(GetTrainedModelsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getTrainedModels, - options, - GetTrainedModelsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets trained model configs asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Trained Model Configs documentation - * - * @param request The {@link GetTrainedModelsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getTrainedModelsAsync( - GetTrainedModelsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getTrainedModels, - options, - GetTrainedModelsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Put trained model config - *
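A usage sketch for the getTrainedModels call above, assuming the request accepts one or more model ids and the response exposes the configs via getTrainedModels():

GetTrainedModelsRequest modelsRequest = new GetTrainedModelsRequest("my-model"); // hypothetical model id
GetTrainedModelsResponse modelsResponse = client.machineLearning().getTrainedModels(modelsRequest, RequestOptions.DEFAULT);
// Print the id of every returned trained model config.
modelsResponse.getTrainedModels().forEach(config -> System.out.println(config.getModelId()));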

- * For additional info - * see - * PUT Trained Model Config documentation - * - * @param request The {@link PutTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link PutTrainedModelResponse} response object - */ - public PutTrainedModelResponse putTrainedModel(PutTrainedModelRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putTrainedModel, - options, - PutTrainedModelResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Put trained model config asynchronously and notifies listener upon completion - *

- * For additional info - * see - * PUT Trained Model Config documentation - * - * @param request The {@link PutTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putTrainedModelAsync( - PutTrainedModelRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putTrainedModel, - options, - PutTrainedModelResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets trained model stats - *

- * For additional info - * see - * GET Trained Model Stats documentation - * - * @param request The {@link GetTrainedModelsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetTrainedModelsStatsResponse} response object - */ - public GetTrainedModelsStatsResponse getTrainedModelsStats(GetTrainedModelsStatsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getTrainedModelsStats, - options, - GetTrainedModelsStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets trained model stats asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Trained Model Stats documentation - * - * @param request The {@link GetTrainedModelsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getTrainedModelsStatsAsync( - GetTrainedModelsStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getTrainedModelsStats, - options, - GetTrainedModelsStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Trained Model - *

- * For additional info - * see - * DELETE Trained Model documentation - * - * @param request The {@link DeleteTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteTrainedModel(DeleteTrainedModelRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteTrainedModel, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Trained Model asynchronously and notifies listener upon completion - *

- * For additional info - * see - * DELETE Trained Model documentation - * - * @param request The {@link DeleteTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteTrainedModelAsync( - DeleteTrainedModelRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteTrainedModel, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates or reassigns a trained model alias - *

- * For additional info - * see - * Put Trained Model Aliases documentation - * - * @param request The {@link PutTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse putTrainedModelAlias(PutTrainedModelAliasRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates or reassigns a trained model alias asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Put Trained Model Aliases documentation - * - * @param request The {@link PutTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putTrainedModelAliasAsync( - PutTrainedModelAliasRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes a trained model alias - *
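A sketch for the alias call above, assuming PutTrainedModelAliasRequest takes the alias, the model id, and a reassign flag (all names here are hypothetical):

// Point the alias "champion" at "my-model-v2"; reassigning an alias that
// already points at another model requires the reassign flag to be true.
PutTrainedModelAliasRequest aliasRequest = new PutTrainedModelAliasRequest("champion", "my-model-v2", true);
AcknowledgedResponse ack = client.machineLearning().putTrainedModelAlias(aliasRequest, RequestOptions.DEFAULT);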

- * For additional info - * see - * Delete Trained Model Aliases documentation - * - * @param request The {@link DeleteTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteTrainedModelAlias(DeleteTrainedModelAliasRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes a trained model alias asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Delete Trained Model Aliases documentation - * - * @param request The {@link DeleteTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteTrainedModelAliasAsync( - DeleteTrainedModelAliasRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 3650662d733cf..f030033392123 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -51,8 +51,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder; -import org.elasticsearch.client.analytics.ParsedInference; import org.elasticsearch.client.analytics.ParsedStringStats; import org.elasticsearch.client.analytics.ParsedTopMetrics; import org.elasticsearch.client.analytics.StringStatsAggregationBuilder; @@ -276,17 +274,12 @@ public class RestHighLevelClient implements Closeable { private volatile ListenableFuture> versionValidationFuture; private final IndicesClient indicesClient = new IndicesClient(this); - private final ClusterClient clusterClient = new ClusterClient(this); private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); - private final TasksClient tasksClient = new TasksClient(this); - private final XPackClient xPackClient = new XPackClient(this); - private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); private final SecurityClient securityClient = new SecurityClient(this); private final TransformClient transformClient = new TransformClient(this); private final EqlClient eqlClient = new EqlClient(this); private final SearchableSnapshotsClient searchableSnapshotsClient = new SearchableSnapshotsClient(this); - private final FeaturesClient featuresClient = new FeaturesClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -367,15 +360,6 @@ public final IndicesClient indices() { return indicesClient; } - /** - * Provides a {@link ClusterClient} which can be used to access the Cluster API. - * - * See Cluster API on elastic.co - */ - public final ClusterClient cluster() { - return clusterClient; - } - /** * Provides a {@link IngestClient} which can be used to access the Ingest API. * @@ -394,28 +378,6 @@ public final SnapshotClient snapshot() { return snapshotClient; } - /** - * Provides a {@link TasksClient} which can be used to access the Tasks API. 
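Every wrapper method removed in this change shares one delegation shape; annotated, the pattern (reproduced from the startDataFrameAnalytics method shown earlier) is:

public StartDataFrameAnalyticsResponse startDataFrameAnalytics(StartDataFrameAnalyticsRequest request, RequestOptions options)
    throws IOException {
    return restHighLevelClient.performRequestAndParseEntity(
        request,                                        // the typed request object
        MLRequestConverters::startDataFrameAnalytics,   // converts it into a low-level HTTP Request
        options,                                        // per-request options such as headers
        StartDataFrameAnalyticsResponse::fromXContent,  // parses the HTTP entity into a typed response
        Collections.emptySet()                          // HTTP status codes to treat as non-errors
    );
}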
- * - * See Task Management API on elastic.co - */ - public final TasksClient tasks() { - return tasksClient; - } - - /** - * Provides methods for accessing the Elastic Licensed X-Pack Info - * and Usage APIs that are shipped with the default distribution of - * Elasticsearch. All of these APIs will 404 if run against the OSS - * distribution of Elasticsearch. - *

- * See the - * Info APIs on elastic.co for more information. - */ - public final XPackClient xpack() { - return xPackClient; - } - /** * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Searchable Snapshots APIs. *

@@ -426,30 +388,6 @@ public SearchableSnapshotsClient searchableSnapshots() { return searchableSnapshotsClient; } - - /** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Features APIs. - *

- * See the Features - * APIs on elastic.co for more information. - */ - public FeaturesClient features() { - return featuresClient; - } - - /** - * Provides methods for accessing the Elastic Licensed Machine Learning APIs that - * are shipped with the Elastic Stack distribution of Elasticsearch. All of - * these APIs will 404 if run against the OSS distribution of Elasticsearch. - *

- * See the - * Machine Learning APIs on elastic.co for more information. - * - * @return the client wrapper for making Machine Learning API calls - */ - public MachineLearningClient machineLearning() { - return machineLearningClient; - } - /** * Provides methods for accessing the Elastic Licensed Security APIs that * are shipped with the Elastic Stack distribution of Elasticsearch. All of @@ -2775,7 +2713,6 @@ static List getDefaultNamedXContents() { map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)); map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c)); map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c)); - map.put(InferencePipelineAggregationBuilder.NAME, (p, c) -> ParsedInference.fromXContent(p, (String) (c))); map.put(TimeSeriesAggregationBuilder.NAME, (p, c) -> ParsedTimeSeries.fromXContent(p, (String) (c))); List entries = map.entrySet() .stream() diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java index 83790585ee119..008fe9206ab8f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java @@ -8,10 +8,6 @@ package org.elasticsearch.client; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.security.AuthenticateRequest; -import org.elasticsearch.client.security.AuthenticateResponse; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.ClearRealmCacheRequest; import org.elasticsearch.client.security.ClearRealmCacheResponse; import org.elasticsearch.client.security.CreateTokenRequest; @@ -22,10 +18,6 @@ import org.elasticsearch.client.security.DeleteRoleMappingResponse; import org.elasticsearch.client.security.DeleteRoleRequest; import org.elasticsearch.client.security.DeleteRoleResponse; -import org.elasticsearch.client.security.DeleteUserRequest; -import org.elasticsearch.client.security.DeleteUserResponse; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetApiKeyResponse; import org.elasticsearch.client.security.GetRolesRequest; @@ -40,8 +32,6 @@ import org.elasticsearch.client.security.PutRoleMappingResponse; import org.elasticsearch.client.security.PutRoleRequest; import org.elasticsearch.client.security.PutRoleResponse; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.PutUserResponse; import java.io.IOException; @@ -67,45 +57,6 @@ public final class SecurityClient { this.restHighLevelClient = restHighLevelClient; } - /** - * Create/update a user in the native realm synchronously. - * See - * the docs for more. - * - * @param request the request with the user's information - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the put user call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public PutUserResponse putUser(PutUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - SecurityRequestConverters::putUser, - options, - PutUserResponse::fromXContent, - emptySet() - ); - } - - /** - * Removes user from the native realm synchronously. - * See - * the docs for more. - * @param request the request with the user to delete - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the delete user call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public DeleteUserResponse deleteUser(DeleteUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - SecurityRequestConverters::deleteUser, - options, - DeleteUserResponse::fromXContent, - singleton(404) - ); - } - /** * Create/Update a role mapping. * See @@ -125,85 +76,6 @@ public PutRoleMappingResponse putRoleMapping(final PutRoleMappingRequest request ); } - /** - * Enable a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user to enable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the user is enabled) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean enableUser(EnableUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::enableUser, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Disable a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user to disable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the user is disabled) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean disableUser(DisableUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::disableUser, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Disable a native realm or built-in user asynchronously. - * See - * the docs for more. - * - * @param request the request with the user to disable - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable disableUserAsync(DisableUserRequest request, RequestOptions options, ActionListener<Boolean> listener) { - return restHighLevelClient.performRequestAsync( - request, - SecurityRequestConverters::disableUser, - options, - RestHighLevelClient::convertExistsResponse, - listener, - emptySet() - ); - } - - /** - * Authenticate the current user and return all the information about the authenticated user. - * See - * the docs for more. - * - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the authenticate user call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public AuthenticateResponse authenticate(RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - AuthenticateRequest.INSTANCE, - AuthenticateRequest::getRequest, - options, - AuthenticateResponse::fromXContent, - emptySet() - ); - } - - /** - * Clears the cache in one or more realms. - * See - * the docs for more. - * - * @param request the request with the realm names - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the clear realm cache call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClearRealmCacheResponse clearRealmCache(ClearRealmCacheRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - SecurityRequestConverters::clearRealmCache, - options, - ClearRealmCacheResponse::fromXContent, - emptySet() - ); - } - - /** - * Change the password of a user of a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user's new password - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the new password was set) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean changePassword(ChangePasswordRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::changePassword, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Delete a role mapping.
* See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java index 8adcbe71e288a..fe3988d79cf7c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java @@ -12,15 +12,11 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.ClearRealmCacheRequest; import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest; import org.elasticsearch.client.security.DeleteRoleMappingRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetRolesRequest; import org.elasticsearch.client.security.InvalidateApiKeyRequest; @@ -28,8 +24,6 @@ import org.elasticsearch.client.security.PutPrivilegesRequest; import org.elasticsearch.client.security.PutRoleMappingRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.SetUserEnabledRequest; import org.elasticsearch.common.Strings; import java.io.IOException; @@ -41,42 +35,6 @@ final class SecurityRequestConverters { private SecurityRequestConverters() {} - static Request changePassword(ChangePasswordRequest changePasswordRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(changePasswordRequest.getUsername()) - .addPathPartAsIs("_password") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE)); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - - static Request putUser(PutUserRequest putUserRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(putUserRequest.getUser().getUsername()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE)); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(putUserRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - - static Request deleteUser(DeleteUserRequest deleteUserRequest) { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security", "user") - .addPathPart(deleteUserRequest.getName()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return 
request; - } - static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) throws IOException { final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/role_mapping") .addPathPart(putRoleMappingRequest.getName()) @@ -89,26 +47,6 @@ static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) return request; } - static Request enableUser(EnableUserRequest enableUserRequest) { - return setUserEnabled(enableUserRequest); - } - - static Request disableUser(DisableUserRequest disableUserRequest) { - return setUserEnabled(disableUserRequest); - } - - private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(setUserEnabledRequest.getUsername()) - .addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable") - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - static Request clearRealmCache(ClearRealmCacheRequest clearRealmCacheRequest) { RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/realm"); if (clearRealmCacheRequest.getRealms().isEmpty() == false) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java deleted file mode 100644 index 83f2185f5104b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.client.tasks.CancelTasksRequest; -import org.elasticsearch.client.tasks.CancelTasksResponse; -import org.elasticsearch.client.tasks.GetTaskRequest; -import org.elasticsearch.client.tasks.GetTaskResponse; - -import java.io.IOException; -import java.util.Optional; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Tasks API. - *
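The removed converters above map directly onto the user-management REST endpoints; for a hypothetical user "jdoe" the endpoint builders produce:

// putUser:        PUT    _security/user/jdoe            (body: the user definition)
// deleteUser:     DELETE _security/user/jdoe
// changePassword: POST   _security/user/jdoe/_password  (body: the new password)
// setUserEnabled: PUT    _security/user/jdoe/_enable    when isEnabled() is true
//                 PUT    _security/user/jdoe/_disable   when isEnabled() is false
// Each converter also copies the request's refresh policy onto the URL parameters.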

- * See Task Management API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class TasksClient { - private final RestHighLevelClient restHighLevelClient; - - TasksClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Get current tasks using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ListTasksResponse list(ListTasksRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - TasksRequestConverters::listTasks, - options, - ListTasksResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously get current tasks using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable listAsync(ListTasksRequest request, RequestOptions options, ActionListener<ListTasksResponse> listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - TasksRequestConverters::listTasks, - options, - ListTasksResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get a task using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public Optional<GetTaskResponse> get(GetTaskRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseOptionalEntity( - request, - TasksRequestConverters::getTask, - options, - GetTaskResponse::fromXContent - ); - } - - /** - * Asynchronously get a task using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener an {@link ActionListener} that takes an optional response (404s are returned as an empty Optional) - * @return cancellable that may be used to cancel the request - */ - public Cancellable getAsync(GetTaskRequest request, RequestOptions options, ActionListener<Optional<GetTaskResponse>> listener) { - return restHighLevelClient.performRequestAsyncAndParseOptionalEntity( - request, - TasksRequestConverters::getTask, - options, - GetTaskResponse::fromXContent, - listener - ); - } - - /** - * Cancel one or more cluster tasks using the Task Management API. - * - * See - * Task Management API on elastic.co - * @param cancelTasksRequest the request - * @param options the request options (e.g.
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - */ - public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - cancelTasksRequest, - TasksRequestConverters::cancelTasks, - options, - CancelTasksResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously cancel one or more cluster tasks using the Task Management API. - * - * See - * Task Management API on elastic.co - * @param cancelTasksRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable cancelAsync( - CancelTasksRequest cancelTasksRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - cancelTasksRequest, - TasksRequestConverters::cancelTasks, - options, - CancelTasksResponse::fromXContent, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java deleted file mode 100644 index 54525a8cd304d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client; - -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.elasticsearch.client.RequestConverters.EndpointBuilder; -import org.elasticsearch.client.tasks.CancelTasksRequest; -import org.elasticsearch.client.tasks.GetTaskRequest; - -final class TasksRequestConverters { - - private TasksRequestConverters() {} - - static Request cancelTasks(CancelTasksRequest req) { - Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); - RequestConverters.Params params = new RequestConverters.Params(); - req.getTimeout().ifPresent(params::withTimeout); - req.getTaskId().ifPresent(params::withTaskId); - req.getParentTaskId().ifPresent(params::withParentTaskId); - params.withNodes(req.getNodes()).withActions(req.getActions()); - if (req.getWaitForCompletion() != null) { - params.withWaitForCompletion(req.getWaitForCompletion()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request listTasks(ListTasksRequest listTaskRequest) { - if (listTaskRequest.getTargetTaskId() != null && listTaskRequest.getTargetTaskId().isSet()) { - throw new IllegalArgumentException("TargetTaskId cannot be used for list tasks request"); - } - Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(listTaskRequest.getTimeout()) - .withDetailed(listTaskRequest.getDetailed()) - .withWaitForCompletion(listTaskRequest.getWaitForCompletion()) - .withParentTaskId(listTaskRequest.getTargetParentTaskId()) - .withNodes(listTaskRequest.getNodes()) - .withActions(listTaskRequest.getActions()) - .putParam("group_by", "none"); - request.addParameters(params.asMap()); - return request; - } - - static Request getTask(GetTaskRequest getTaskRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_tasks") - .addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(getTaskRequest.getTimeout()).withWaitForCompletion(getTaskRequest.getWaitForCompletion()); - request.addParameters(params.asMap()); - return request; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java deleted file mode 100644 index f019a262b607a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
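A sketch of what the converters above produce (invocable only from the same package, since the class is package-private; the node id and task id are hypothetical):

// getTask builds GET _tasks/{nodeId}:{taskId}, plus optional timeout and
// wait_for_completion parameters:
Request taskRequest = TasksRequestConverters.getTask(new GetTaskRequest("nodeA", 42L));
// taskRequest.getEndpoint() -> "_tasks/nodeA:42"

// listTasks builds GET /_tasks with group_by=none plus timeout, detailed,
// wait_for_completion, parent_task_id, nodes and actions parameters.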
- */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.xpack.XPackInfoRequest; -import org.elasticsearch.client.xpack.XPackInfoResponse; -import org.elasticsearch.client.xpack.XPackUsageRequest; -import org.elasticsearch.client.xpack.XPackUsageResponse; - -import java.io.IOException; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for - * accessing the Elastic Licensed X-Pack APIs that are shipped with the - * default distribution of Elasticsearch. All of these APIs will 404 if run - * against the OSS distribution of Elasticsearch. - *

- * See the - * REST APIs on elastic.co for more information. - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class XPackClient { - - private final RestHighLevelClient restHighLevelClient; - - XPackClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Fetch information about X-Pack from the cluster. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - XPackRequestConverters::info, - options, - XPackInfoResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously fetch information about X-Pack from the cluster. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable infoAsync(XPackInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - XPackRequestConverters::info, - options, - XPackInfoResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Fetch usage information about X-Pack features from the cluster. - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - XPackRequestConverters::usage, - options, - XPackUsageResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously fetch usage information about X-Pack features from the cluster. - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - XPackRequestConverters::usage, - options, - XPackUsageResponse::fromXContent, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java deleted file mode 100644 index 47e1eb00bbd7f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * For building inference pipeline aggregations - * - * NOTE: This extends {@linkplain AbstractPipelineAggregationBuilder} for compatibility - * with {@link SearchSourceBuilder#aggregation(PipelineAggregationBuilder)} but it - * doesn't support any "server" side things like {@linkplain #doWriteTo(StreamOutput)} - * or {@linkplain #createInternal(Map)} - */ -public class InferencePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { - - public static String NAME = "inference"; - - public static final ParseField MODEL_ID = new ParseField("model_id"); - private static final ParseField INFERENCE_CONFIG = new ParseField("inference_config"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - false, - (args, name) -> new InferencePipelineAggregationBuilder(name, (String) args[0], (Map) args[1]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_ID); - PARSER.declareObject(constructorArg(), (p, c) -> p.mapStrings(), BUCKETS_PATH_FIELD); - PARSER.declareNamedObject( - InferencePipelineAggregationBuilder::setInferenceConfig, - (p, c, n) -> p.namedObject(InferenceConfig.class, n, c), - INFERENCE_CONFIG - ); - } - - private final Map bucketPathMap; - private final String modelId; - private 
InferenceConfig inferenceConfig; - - public static InferencePipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) { - return PARSER.apply(parser, pipelineAggregatorName); - } - - public InferencePipelineAggregationBuilder(String name, String modelId, Map bucketsPath) { - super(name, NAME, new TreeMap<>(bucketsPath).values().toArray(new String[] {})); - this.modelId = modelId; - this.bucketPathMap = bucketsPath; - } - - public void setInferenceConfig(InferenceConfig inferenceConfig) { - this.inferenceConfig = inferenceConfig; - } - - @Override - protected void validate(ValidationContext context) { - // validation occurs on the server - } - - @Override - protected void doWriteTo(StreamOutput out) { - throw new UnsupportedOperationException(); - } - - @Override - protected PipelineAggregator createInternal(Map metaData) { - throw new UnsupportedOperationException(); - } - - @Override - protected boolean overrideBucketsPath() { - return true; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(MODEL_ID.getPreferredName(), modelId); - builder.field(BUCKETS_PATH_FIELD.getPreferredName(), bucketPathMap); - if (inferenceConfig != null) { - builder.startObject(INFERENCE_CONFIG.getPreferredName()); - builder.field(inferenceConfig.getName(), inferenceConfig); - builder.endObject(); - } - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), bucketPathMap, modelId, inferenceConfig); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - if (super.equals(obj) == false) return false; - - InferencePipelineAggregationBuilder other = (InferencePipelineAggregationBuilder) obj; - return Objects.equals(bucketPathMap, other.bucketPathMap) - && Objects.equals(modelId, other.modelId) - && Objects.equals(inferenceConfig, other.inferenceConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java deleted file mode 100644 index d3e1fcd5c85f0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
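Per its javadoc, the builder removed above existed only to render the request; a construction sketch (the aggregation name, model id and buckets path are hypothetical, and `searchSourceBuilder` is assumed to exist):

// Map each model input field to the path of a sibling aggregation.
Map<String, String> bucketsPath = new TreeMap<>();
bucketsPath.put("avg_cost", "my_avg_agg");
InferencePipelineAggregationBuilder inference =
    new InferencePipelineAggregationBuilder("infer", "my-model", bucketsPath);
// Accepted through the PipelineAggregationBuilder-typed overload.
searchSourceBuilder.aggregation(inference);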
- */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.client.ml.inference.results.FeatureImportance; -import org.elasticsearch.client.ml.inference.results.TopClassEntry; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * This class parses the superset of all possible fields that may be written by - * InferenceResults. The warning field is mutually exclusive with all the other fields. - * - * In the case of classification results {@link #getValue()} may return a String, - * Boolean or a Double. For regression results {@link #getValue()} is always - * a Double. - */ -public class ParsedInference extends ParsedAggregation { - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ParsedInference.class.getSimpleName(), - true, - args -> new ParsedInference(args[0], (List) args[1], (List) args[2], (String) args[3]) - ); - - public static final ParseField FEATURE_IMPORTANCE = new ParseField("feature_importance"); - public static final ParseField WARNING = new ParseField("warning"); - public static final ParseField TOP_CLASSES = new ParseField("top_classes"); - - static { - PARSER.declareField(optionalConstructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" - + ParsedInference.class.getSimpleName() - + "] failed to parse field [" - + CommonFields.VALUE - + "] " - + "value [" - + token - + "] is not a string, boolean or number" - ); - } - return o; - }, CommonFields.VALUE, ObjectParser.ValueType.VALUE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> FeatureImportance.fromXContent(p), FEATURE_IMPORTANCE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TopClassEntry.fromXContent(p), TOP_CLASSES); - PARSER.declareString(optionalConstructorArg(), WARNING); - declareAggregationFields(PARSER); - } - - public static ParsedInference fromXContent(XContentParser parser, final String name) { - ParsedInference parsed = PARSER.apply(parser, null); - parsed.setName(name); - return parsed; - } - - private final Object value; - private final List featureImportance; - private final List topClasses; - private final String warning; - - ParsedInference(Object value, List featureImportance, List topClasses, String warning) { - this.value = value; - this.warning = warning; - this.featureImportance = featureImportance; - this.topClasses = topClasses; - } - - public Object getValue() { - return value; - } - - public List getFeatureImportance() { - return featureImportance; - } - - public List getTopClasses() { - return topClasses; - } - - public String getWarning() { - return warning; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, 
Params params) throws IOException { - if (warning != null) { - builder.field(WARNING.getPreferredName(), warning); - } else { - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (topClasses != null && topClasses.size() > 0) { - builder.field(TOP_CLASSES.getPreferredName(), topClasses); - } - if (featureImportance != null && featureImportance.size() > 0) { - builder.field(FEATURE_IMPORTANCE.getPreferredName(), featureImportance); - } - } - return builder; - } - - @Override - public String getType() { - return InferencePipelineAggregationBuilder.NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java deleted file mode 100644 index 313a118ba4f00..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.client.TimedRequest; - -/** - * A {@link TimedRequest} to get the list of features available to be included in snapshots in the cluster. - */ -public class GetFeaturesRequest extends TimedRequest {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java deleted file mode 100644 index 440444abbbbee..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
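Because ParsedInference#getValue() (removed just above) is typed Object, consumers had to branch on the runtime type; a sketch, assuming `inference` came from a parsed search response:

Object value = inference.getValue();
if (inference.getWarning() != null) {
    // The warning field is mutually exclusive with all the other fields.
} else if (value instanceof Double) {
    double numeric = (Double) value; // regression, or a numeric classification value
} else if (value instanceof Boolean || value instanceof String) {
    // Classification results may also be boolean- or string-valued.
}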
- */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -public class GetFeaturesResponse { - - private final List features; - - private static final ParseField FEATURES = new ParseField("features"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshottable_features_response", - true, - (a, ctx) -> new GetFeaturesResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshottableFeature::parse, FEATURES); - } - - public GetFeaturesResponse(List features) { - this.features = features; - } - - public List getFeatures() { - return features; - } - - public static GetFeaturesResponse parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof GetFeaturesResponse) == false) return false; - GetFeaturesResponse that = (GetFeaturesResponse) o; - return getFeatures().equals(that.getFeatures()); - } - - @Override - public int hashCode() { - return Objects.hash(getFeatures()); - } - - public static class SnapshottableFeature { - - private final String featureName; - private final String description; - - private static final ParseField FEATURE_NAME = new ParseField("name"); - private static final ParseField DESCRIPTION = new ParseField("description"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature", - true, - (a, ctx) -> new SnapshottableFeature((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), FEATURE_NAME, ObjectParser.ValueType.STRING); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), DESCRIPTION, ObjectParser.ValueType.STRING); - } - - public SnapshottableFeature(String featureName, String description) { - this.featureName = featureName; - this.description = description; - } - - public static SnapshottableFeature parse(XContentParser parser, Void ctx) { - return PARSER.apply(parser, ctx); - } - - public String getFeatureName() { - return featureName; - } - - public String getDescription() { - return description; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof SnapshottableFeature) == false) return false; - SnapshottableFeature feature = (SnapshottableFeature) o; - return Objects.equals(getFeatureName(), feature.getFeatureName()); - } - - @Override - public int hashCode() { - return Objects.hash(getFeatureName()); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java deleted file mode 100644 index 5bc2565c24b17..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.client.TimedRequest; - -public class ResetFeaturesRequest extends TimedRequest {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java deleted file mode 100644 index c3fca66724138..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -/** - * This class represents the response of the Feature State Reset API. It is a - * list containing the response of every feature whose state can be reset. The - * response from each feature will indicate success or failure. In the case of a - * failure, the cause will be returned as well. - */ -public class ResetFeaturesResponse { - private final List features; - - private static final ParseField FEATURES = new ParseField("features"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "features_reset_status_response", - true, - (a, ctx) -> new ResetFeaturesResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray( - ConstructingObjectParser.constructorArg(), - ResetFeaturesResponse.ResetFeatureStateStatus::parse, - FEATURES - ); - } - - /** - * Create a new ResetFeaturesResponse - * @param features A full list of status responses from individual feature reset operations. - */ - public ResetFeaturesResponse(List features) { - this.features = features; - } - - /** - * @return List containing a reset status for each feature that we have tried to reset. - */ - public List getFeatureResetStatuses() { - return features; - } - - public static ResetFeaturesResponse parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * A class representing the status of an attempt to reset a feature's state. - * The attempt to reset either succeeds and we return the name of the - * feature and a success flag; or it fails and we return the name of the feature, - * a status flag, and the exception thrown during the attempt to reset the feature. 
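Put concretely, a caller could inspect each per-feature outcome like this (a sketch assuming a parsed ResetFeaturesResponse named response; ResetFeatureStateStatus and its accessors are defined just below, and "SUCCESS"/"FAILURE" are the two status values the class asserts):

    for (ResetFeaturesResponse.ResetFeatureStateStatus status : response.getFeatureResetStatuses()) {
        if ("FAILURE".equals(status.getStatus())) {
            // getException() is non-null exactly when the status is "FAILURE"
            System.err.println(status.getFeatureName() + " failed: " + status.getException().getMessage());
        }
    }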
- */ - public static class ResetFeatureStateStatus { - private final String featureName; - private final String status; - private final Exception exception; - - private static final ParseField FEATURE_NAME = new ParseField("feature_name"); - private static final ParseField STATUS = new ParseField("status"); - private static final ParseField EXCEPTION = new ParseField("exception"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_state_reset_stats", - true, - (a, ctx) -> new ResetFeatureStateStatus((String) a[0], (String) a[1], (ElasticsearchException) a[2]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), FEATURE_NAME, ObjectParser.ValueType.STRING); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), STATUS, ObjectParser.ValueType.STRING); - PARSER.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - EXCEPTION - ); - } - - /** - * Create a ResetFeatureStateStatus. - * @param featureName Name of the feature whose status has been reset. - * @param status Whether the reset attempt succeeded or failed. - * @param exception If the reset attempt failed, the exception that caused the - * failure. Must be null when status is "SUCCESS". - */ - ResetFeatureStateStatus(String featureName, String status, @Nullable Exception exception) { - this.featureName = featureName; - assert "SUCCESS".equals(status) || "FAILURE".equals(status); - this.status = status; - assert "FAILURE".equals(status) ? Objects.nonNull(exception) : Objects.isNull(exception); - this.exception = exception; - } - - public static ResetFeatureStateStatus parse(XContentParser parser, Void ctx) { - return PARSER.apply(parser, ctx); - } - - /** - * @return Name of the feature that we tried to reset - */ - public String getFeatureName() { - return featureName; - } - - /** - * @return "SUCCESS" if the reset attempt succeeded, "FAILURE" otherwise. - */ - public String getStatus() { - return status; - } - - /** - * @return The exception that caused the reset attempt to fail. - */ - @Nullable - public Exception getException() { - return exception; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java deleted file mode 100644 index 94e42fc0c8b0a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Abstract class that provides a list of results and their count. 
- */ -public abstract class AbstractResultResponse implements ToXContentObject { - - public static final ParseField COUNT = new ParseField("count"); - - private final ParseField resultsField; - protected final List results; - protected final long count; - - AbstractResultResponse(ParseField resultsField, List results, long count) { - this.resultsField = Objects.requireNonNull(resultsField, "[results_field] must not be null"); - this.results = Collections.unmodifiableList(results); - this.count = count; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(COUNT.getPreferredName(), count); - builder.field(resultsField.getPreferredName(), results); - builder.endObject(); - return builder; - } - - public long count() { - return count; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java deleted file mode 100644 index bd55976d7debb..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.security.InvalidParameterException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request to close Machine Learning Jobs - */ -public class CloseJobRequest implements ToXContentObject, Validatable { - - public static final ParseField JOB_ID = new ParseField("job_id"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "close_job_request", - true, - a -> new CloseJobRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - JOB_ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(CloseJobRequest::setForce, FORCE); - PARSER.declareBoolean(CloseJobRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_JOBS = "_all"; - - private final List jobIds; - private TimeValue timeout; - private Boolean force; - private Boolean allowNoMatch; - - /** - * Explicitly close all jobs - * - * @return a {@link CloseJobRequest} for all existing jobs 
- */ - public static CloseJobRequest closeAllJobsRequest() { - return new CloseJobRequest(ALL_JOBS); - } - - CloseJobRequest(List jobIds) { - if (jobIds.isEmpty()) { - throw new InvalidParameterException("jobIds must not be empty"); - } - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * Close the specified Jobs via their unique jobIds - * - * @param jobIds must be non-null and non-empty and each jobId must be non-null - */ - public CloseJobRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * All the jobIds to be closed - */ - public List getJobIds() { - return jobIds; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the close request to complete before timing out. - * - * @param timeout Default value: 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public Boolean getForce() { - return force; - } - - /** - * Should the closing be forced. - * - * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request. - * - * @param force When {@code true} forcefully close the job. Defaults to {@code false} - */ - public void setForce(boolean force) { - this.force = force; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * This includes {@code _all} string or when no jobs have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, timeout, force, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - CloseJobRequest that = (CloseJobRequest) other; - return Objects.equals(jobIds, that.jobIds) - && Objects.equals(timeout, that.timeout) - && Objects.equals(force, that.force) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (force != null) { - builder.field(FORCE.getPreferredName(), force); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java deleted file mode 100644 index 827cd87595ad4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
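Taken together, the request class above supported usage along these lines (a minimal sketch; the job IDs and timeout values are illustrative):

    CloseJobRequest request = new CloseJobRequest("job-1", "job-2"); // close specific jobs or groups
    request.setTimeout(TimeValue.timeValueMinutes(10)); // default is 30 minutes
    request.setForce(false);       // true force-closes a failed or unresponsive job
    request.setAllowNoMatch(true); // don't error when a wildcard or _all matches no jobs

    CloseJobRequest closeAll = CloseJobRequest.closeAllJobsRequest(); // expands to the "_all" job id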
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Job(s) closed or not - */ -public class CloseJobResponse implements ToXContentObject { - - private static final ParseField CLOSED = new ParseField("closed"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "close_job_response", - true, - (a) -> new CloseJobResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED); - } - - private boolean closed; - - public CloseJobResponse(boolean closed) { - this.closed = closed; - } - - public static CloseJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Whether the job closed or not - * @return boolean value indicating the job closed status - */ - public boolean isClosed() { - return closed; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - CloseJobResponse that = (CloseJobResponse) other; - return isClosed() == that.isClosed(); - } - - @Override - public int hashCode() { - return Objects.hash(isClosed()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLOSED.getPreferredName(), closed); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java deleted file mode 100644 index 5ade66ff71820..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request class for removing an event from an existing calendar - */ -public class DeleteCalendarEventRequest implements Validatable { - - private final String eventId; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which event to remove - * from it. - * - * @param calendarId The non-null ID of the calendar - * @param eventId Scheduled Event to remove from the calendar; cannot be null.
- */ - public DeleteCalendarEventRequest(String calendarId, String eventId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - this.eventId = Objects.requireNonNull(eventId, "[event_id] must not be null."); - } - - public String getEventId() { - return eventId; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(eventId, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarEventRequest that = (DeleteCalendarEventRequest) other; - return Objects.equals(eventId, that.eventId) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java deleted file mode 100644 index fff975334d95c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.security.InvalidParameterException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request class for removing Machine Learning Jobs from an existing calendar - */ -public class DeleteCalendarJobRequest implements Validatable { - - private final List jobIds; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which JobIds to remove - * from it. - * - * @param calendarId The non-null ID of the calendar - * @param jobIds JobIds to remove from the calendar, cannot be empty, or contain null values. - * It can be a list of jobs or groups. - */ - public DeleteCalendarJobRequest(String calendarId, String... 
jobIds) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - if (jobIds.length == 0) { - throw new InvalidParameterException("jobIds must not be empty."); - } - if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values."); - } - this.jobIds = Arrays.asList(jobIds); - } - - public List getJobIds() { - return jobIds; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarJobRequest that = (DeleteCalendarJobRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java deleted file mode 100644 index 8777d202529f6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Calendar - */ -public class DeleteCalendarRequest implements Validatable { - - private final String calendarId; - - /** - * The constructor requires a single calendar id. - * @param calendarId The calendar to delete. Must be {@code non-null} - */ - public DeleteCalendarRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null"); - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DeleteCalendarRequest other = (DeleteCalendarRequest) obj; - return Objects.equals(calendarId, other.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java deleted file mode 100644 index ec19611ec58ae..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
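For reference, the three calendar deletion requests removed above were constructed as follows (a sketch; the calendar, event, job, and group IDs are illustrative):

    new DeleteCalendarEventRequest("payroll-calendar", "event-1"); // drop one scheduled event
    new DeleteCalendarJobRequest("payroll-calendar", "job-1", "payroll-group"); // detach jobs and/or groups
    new DeleteCalendarRequest("payroll-calendar"); // delete the calendar itself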
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -/** - * Request to delete a data frame analytics config - */ -public class DeleteDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private Boolean force; - private TimeValue timeout; - - public DeleteDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete a job that is not stopped. - * This method is quicker than stopping and deleting the job. - * - * @param force When {@code true} forcefully delete a non-stopped job. Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * Sets the time to wait until the job is deleted. - * - * @param timeout The time to wait until the job is deleted. - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DeleteDataFrameAnalyticsRequest other = (DeleteDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(force, other.force) && Objects.equals(timeout, other.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(id, force, timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java deleted file mode 100644 index 4ed729e9fd079..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Datafeed via its ID - */ -public class DeleteDatafeedRequest implements Validatable { - - private String datafeedId; - private Boolean force; - - public DeleteDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - } - - public String getDatafeedId() { - return datafeedId; - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete a started datafeed. - * This method is quicker than stopping and deleting the datafeed. - * - * @param force When {@code true} forcefully delete a started datafeed.
Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteDatafeedRequest other = (DeleteDatafeedRequest) obj; - return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java deleted file mode 100644 index 66edbcfe8e961..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to delete expired model snapshots and forecasts - */ -public class DeleteExpiredDataRequest implements Validatable, ToXContentObject { - - static final String REQUESTS_PER_SECOND = "requests_per_second"; - static final String TIMEOUT = "timeout"; - static final String JOB_ID = "job_id"; - - private final String jobId; - private final Float requestsPerSecond; - private final TimeValue timeout; - - /** - * Create a new request to delete expired data - */ - public DeleteExpiredDataRequest() { - this(null, null, null); - } - - public DeleteExpiredDataRequest(String jobId, Float requestsPerSecond, TimeValue timeout) { - this.jobId = jobId; - this.requestsPerSecond = requestsPerSecond; - this.timeout = timeout; - } - - /** - * The requests allowed per second in the underlying Delete by Query requests executed. - * - * `-1.0f` indicates that the standard nightly cleanup behavior should be run. - * Throttling scales according to the number of data nodes. - * `null` is default and means no throttling will occur. - */ - public Float getRequestsPerSecond() { - return requestsPerSecond; - } - - /** - * Indicates how long the deletion request will run until it times out. - * - * Default value is 8 hours. - */ - public TimeValue getTimeout() { - return timeout; - } - - /** - * The optional job id - * - * The default is `null` meaning all jobs.
- * @return The job id or null - */ - public String getJobId() { - return jobId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteExpiredDataRequest that = (DeleteExpiredDataRequest) o; - return Objects.equals(requestsPerSecond, that.requestsPerSecond) - && Objects.equals(timeout, that.timeout) - && Objects.equals(jobId, that.jobId); - } - - @Override - public int hashCode() { - return Objects.hash(requestsPerSecond, timeout, jobId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (jobId != null) { - builder.field(JOB_ID, jobId); - } - if (requestsPerSecond != null) { - builder.field(REQUESTS_PER_SECOND, requestsPerSecond); - } - if (timeout != null) { - builder.field(TIMEOUT, timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java deleted file mode 100644 index 18cd260698198..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the deletion of expired data - */ -public class DeleteExpiredDataResponse implements ToXContentObject { - - private static final ParseField DELETED = new ParseField("deleted"); - - public DeleteExpiredDataResponse(boolean deleted) { - this.deleted = deleted; - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_expired_data_response", - true, - a -> new DeleteExpiredDataResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), DELETED); - } - - public static DeleteExpiredDataResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean deleted; - - public Boolean getDeleted() { - return deleted; - } - - @Override - public int hashCode() { - return Objects.hash(deleted); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (deleted != null) { - builder.field(DELETED.getPreferredName(), deleted); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DeleteExpiredDataResponse response = (DeleteExpiredDataResponse) obj; - return 
Objects.equals(deleted, response.deleted); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java deleted file mode 100644 index a98ad85c775e0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * A request to delete a machine learning filter - */ -public class DeleteFilterRequest implements Validatable { - - private final String filterId; - - public DeleteFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[filter_id] is required"); - } - - public String getId() { - return filterId; - } - - @Override - public int hashCode() { - return Objects.hash(filterId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final DeleteFilterRequest other = (DeleteFilterRequest) obj; - - return Objects.equals(filterId, other.filterId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java deleted file mode 100644 index 11a49bf3aa270..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
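For reference, the expired-data cleanup and filter deletion removed above were driven roughly as follows (a minimal sketch; the job ID, throttle value, and filter ID are illustrative, and response is assumed to come from a client call):

    // Throttle the underlying delete-by-query to 1000 requests/s for one job, with the documented 8-hour default timeout.
    DeleteExpiredDataRequest cleanup = new DeleteExpiredDataRequest("my-job", 1000.0f, TimeValue.timeValueHours(8));
    // getDeleted() returns a Boolean, so guard against null before branching.
    if (Boolean.TRUE.equals(response.getDeleted())) {
        System.out.println("expired ML data removed");
    }
    DeleteFilterRequest deleteFilter = new DeleteFilterRequest("safe-domains"); // [filter_id] is required, non-null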
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * POJO for a delete forecast request - */ -public class DeleteForecastRequest implements Validatable, ToXContentObject { - - public static final ParseField FORECAST_ID = new ParseField("forecast_id"); - public static final ParseField ALLOW_NO_FORECASTS = new ParseField("allow_no_forecasts"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final String ALL = "_all"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_forecast_request", - (a) -> new DeleteForecastRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareStringOrNull((c, p) -> c.setForecastIds(Strings.commaDelimitedListToStringArray(p)), FORECAST_ID); - PARSER.declareBoolean(DeleteForecastRequest::setAllowNoForecasts, ALLOW_NO_FORECASTS); - PARSER.declareString(DeleteForecastRequest::timeout, TIMEOUT); - } - - /** - * Create a new {@link DeleteForecastRequest} that explicitly deletes all forecasts - * - * @param jobId the jobId of the Job whose forecasts to delete - */ - public static DeleteForecastRequest deleteAllForecasts(String jobId) { - DeleteForecastRequest request = new DeleteForecastRequest(jobId); - request.setForecastIds(ALL); - return request; - } - - private final String jobId; - private List forecastIds = new ArrayList<>(); - private Boolean allowNoForecasts; - private TimeValue timeout; - - /** - * Create a new DeleteForecastRequest for the given Job ID - * - * @param jobId the jobId of the Job whose forecast(s) to delete - */ - public DeleteForecastRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public String getJobId() { - return jobId; - } - - public List getForecastIds() { - return forecastIds; - } - - /** - * The forecast IDs to delete. Can also be {@link DeleteForecastRequest#ALL} to explicitly delete ALL forecasts - * - * @param forecastIds forecast IDs to delete - */ - public void setForecastIds(String... forecastIds) { - setForecastIds(Arrays.asList(forecastIds)); - } - - void setForecastIds(List forecastIds) { - if (forecastIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("forecastIds must not contain null values"); - } - this.forecastIds = new ArrayList<>(forecastIds); - } - - public Boolean getAllowNoForecasts() { - return allowNoForecasts; - } - - /** - * Sets the value of "allow_no_forecasts". - * - * @param allowNoForecasts when {@code true} no error is thrown when {@link DeleteForecastRequest#ALL} does not find any forecasts - */ - public void setAllowNoForecasts(boolean allowNoForecasts) { - this.allowNoForecasts = allowNoForecasts; - } - - /** - * Allows setting the timeout - * @param timeout timeout as a string (e.g.
1s) - */ - public void timeout(String timeout) { - this.timeout = TimeValue.parseTimeValue(timeout, this.timeout, getClass().getSimpleName() + ".timeout"); - } - - /** - * Allows setting the timeout - * @param timeout timeout as a {@link TimeValue} - */ - public void timeout(TimeValue timeout) { - this.timeout = timeout; - } - - public TimeValue timeout() { - return timeout; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteForecastRequest that = (DeleteForecastRequest) other; - return Objects.equals(jobId, that.jobId) - && Objects.equals(forecastIds, that.forecastIds) - && Objects.equals(allowNoForecasts, that.allowNoForecasts) - && Objects.equals(timeout, that.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, forecastIds, allowNoForecasts, timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (forecastIds != null) { - builder.field(FORECAST_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(forecastIds)); - } - if (allowNoForecasts != null) { - builder.field(ALLOW_NO_FORECASTS.getPreferredName(), allowNoForecasts); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java deleted file mode 100644 index dc4c0cd4d6c86..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Job via its ID - */ -public class DeleteJobRequest implements Validatable { - - private String jobId; - private Boolean force; - private Boolean waitForCompletion; - - public DeleteJobRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId to delete - * @param jobId unique jobId to delete, must not be null - */ - public void setJobId(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete an opened job. - * This method is quicker than closing and deleting the job. - * - * @param force When {@code true} forcefully delete an opened job.
Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - /** - * Set whether this request should wait until the operation has completed before returning - * @param waitForCompletion When {@code true} the call will wait for the job deletion to complete. - * Otherwise, the deletion will be executed asynchronously and the response - * will contain the task id. - */ - public void setWaitForCompletion(Boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteJobRequest other = (DeleteJobRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java deleted file mode 100644 index ad843da43a357..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response object that contains the acknowledgement or the task id - * depending on whether the delete job action was requested to wait for completion. 
- */ -public class DeleteJobResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField TASK = new ParseField("task"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_job_response", - true, - a -> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING); - } - - public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final TaskId task; - - DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) { - assert acknowledged != null || task != null; - this.acknowledged = acknowledged; - this.task = task; - } - - /** - * Get the action acknowledgement - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or - * otherwise a {@code boolean} that indicates whether the job was deleted successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the task id - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or - * otherwise the id of the job deletion task. - */ - public TaskId getTask() { - return task; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, task); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteJobResponse that = (DeleteJobResponse) other; - return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (task != null) { - builder.field(TASK.getPreferredName(), task.toString()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java deleted file mode 100644 index 75dda5d47eade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
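The interplay between the request's wait-for-completion flag and this response is easiest to see in a sketch (the job ID is illustrative, and response is assumed to come from a client call):

    DeleteJobRequest request = new DeleteJobRequest("old-job");
    request.setWaitForCompletion(false); // run the deletion asynchronously
    // At least one of the two fields is non-null, per the constructor's assertion:
    if (response.getTask() != null) {
        System.out.println("deletion running as task " + response.getTask());
    } else {
        System.out.println("acknowledged: " + response.getAcknowledged());
    }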
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs - */ -public class DeleteModelSnapshotRequest implements Validatable { - - private final String jobId; - private final String snapshotId; - - public DeleteModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteModelSnapshotRequest other = (DeleteModelSnapshotRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java deleted file mode 100644 index aa91a01a0d775..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class DeleteTrainedModelAliasRequest implements Validatable { - - private final String modelAlias; - private final String modelId; - - public DeleteTrainedModelAliasRequest(String modelAlias, String modelId) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteTrainedModelAliasRequest request = (DeleteTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) && Objects.equals(modelId, request.modelId); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java deleted file mode 100644 index a964c8f0ddd63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; - -import java.util.Objects; -import java.util.Optional; - -/** - * Request to delete a trained model - */ -public class DeleteTrainedModelRequest implements Validatable { - - private final String id; - - public DeleteTrainedModelRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DeleteTrainedModelRequest other = (DeleteTrainedModelRequest) o; - return Objects.equals(id, other.id); - } - - @Override - public int hashCode() { - return Objects.hash(id); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java deleted file mode 100644 index 2a2a43fb2441e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -/** - * Request to estimate the model memory an analysis config is likely to need given supplied field cardinalities.
- */ -public class EstimateModelMemoryRequest implements Validatable, ToXContentObject { - - public static final String ANALYSIS_CONFIG = "analysis_config"; - public static final String OVERALL_CARDINALITY = "overall_cardinality"; - public static final String MAX_BUCKET_CARDINALITY = "max_bucket_cardinality"; - - private final AnalysisConfig analysisConfig; - private Map overallCardinality = Collections.emptyMap(); - private Map maxBucketCardinality = Collections.emptyMap(); - - @Override - public Optional validate() { - return Optional.empty(); - } - - public EstimateModelMemoryRequest(AnalysisConfig analysisConfig) { - this.analysisConfig = Objects.requireNonNull(analysisConfig); - } - - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - public Map getOverallCardinality() { - return overallCardinality; - } - - public void setOverallCardinality(Map overallCardinality) { - this.overallCardinality = Collections.unmodifiableMap(overallCardinality); - } - - public Map getMaxBucketCardinality() { - return maxBucketCardinality; - } - - public void setMaxBucketCardinality(Map maxBucketCardinality) { - this.maxBucketCardinality = Collections.unmodifiableMap(maxBucketCardinality); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ANALYSIS_CONFIG, analysisConfig); - if (overallCardinality.isEmpty() == false) { - builder.field(OVERALL_CARDINALITY, overallCardinality); - } - if (maxBucketCardinality.isEmpty() == false) { - builder.field(MAX_BUCKET_CARDINALITY, maxBucketCardinality); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(analysisConfig, overallCardinality, maxBucketCardinality); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - EstimateModelMemoryRequest that = (EstimateModelMemoryRequest) other; - return Objects.equals(analysisConfig, that.analysisConfig) - && Objects.equals(overallCardinality, that.overallCardinality) - && Objects.equals(maxBucketCardinality, that.maxBucketCardinality); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java deleted file mode 100644 index 806a76fe7eb63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
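The two cardinality maps above are keyed by field name. A minimal sketch (the AnalysisConfig is assumed to be built elsewhere; the field name and counts are illustrative):

    EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
    request.setOverallCardinality(Map.of("airline", 50_000L)); // distinct values across the whole analysis
    request.setMaxBucketCardinality(Map.of("airline", 500L));  // highest distinct count seen in any one bucket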
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class EstimateModelMemoryResponse { - - public static final ParseField MODEL_MEMORY_ESTIMATE = new ParseField("model_memory_estimate"); - - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "estimate_model_memory", - true, - args -> new EstimateModelMemoryResponse((String) args[0]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_MEMORY_ESTIMATE); - } - - public static EstimateModelMemoryResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final ByteSizeValue modelMemoryEstimate; - - public EstimateModelMemoryResponse(String modelMemoryEstimate) { - this.modelMemoryEstimate = ByteSizeValue.parseBytesSizeValue(modelMemoryEstimate, MODEL_MEMORY_ESTIMATE.getPreferredName()); - } - - /** - * @return An estimate of the model memory the supplied analysis config is likely to need given the supplied field cardinalities. - */ - public ByteSizeValue getModelMemoryEstimate() { - return modelMemoryEstimate; - } - - @Override - public boolean equals(Object o) { - - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - EstimateModelMemoryResponse other = (EstimateModelMemoryResponse) o; - return Objects.equals(this.modelMemoryEstimate, other.modelMemoryEstimate); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryEstimate); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java deleted file mode 100644 index 5bad5d73a8892..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
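The estimate arrives as a byte-size string and is parsed into a ByteSizeValue, so reading it back out is a single call (a sketch assuming a parsed response named response):

    ByteSizeValue estimate = response.getModelMemoryEstimate();
    System.out.println("suggested model_memory_limit: " + estimate.getStringRep()); // e.g. "21mb"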
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.QueryConfig; -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class EvaluateDataFrameRequest implements ToXContentObject, Validatable { - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - private static final ParseField EVALUATION = new ParseField("evaluation"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "evaluate_data_frame_request", - true, - args -> new EvaluateDataFrameRequest((List) args[0], (QueryConfig) args[1], (Evaluation) args[2]) - ); - - static { - PARSER.declareStringArray(constructorArg(), INDEX); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareObject(constructorArg(), (p, c) -> parseEvaluation(p), EVALUATION); - } - - private static Evaluation parseEvaluation(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - Evaluation evaluation = parser.namedObject(Evaluation.class, parser.currentName(), null); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return evaluation; - } - - public static EvaluateDataFrameRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private List indices; - private QueryConfig queryConfig; - private Evaluation evaluation; - - public EvaluateDataFrameRequest(String index, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - this(Arrays.asList(index), queryConfig, evaluation); - } - - public EvaluateDataFrameRequest(List indices, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - setIndices(indices); - setQueryConfig(queryConfig); - setEvaluation(evaluation); - } - - public List getIndices() { - return Collections.unmodifiableList(indices); - } - - public final void setIndices(List indices) { - Objects.requireNonNull(indices); - this.indices = new ArrayList<>(indices); - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public final void setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - } - - public Evaluation getEvaluation() { - return evaluation; - } - - public final void setEvaluation(Evaluation evaluation) { - this.evaluation = evaluation; - } - - @Override - public Optional validate() { - List errors = new ArrayList<>(); - if 
(indices.isEmpty()) { - errors.add("At least one index must be specified"); - } - if (evaluation == null) { - errors.add("evaluation must not be null"); - } - return errors.isEmpty() ? Optional.empty() : Optional.of(ValidationException.withErrors(errors)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.stringListField(INDEX.getPreferredName(), indices); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - builder.startObject(EVALUATION.getPreferredName()).field(evaluation.getName(), evaluation).endObject(); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(indices, queryConfig, evaluation); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameRequest that = (EvaluateDataFrameRequest) o; - return Objects.equals(indices, that.indices) - && Objects.equals(queryConfig, that.queryConfig) - && Objects.equals(evaluation, that.evaluation); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java deleted file mode 100644 index 82213974297ab..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.NamedObjectNotFoundException; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -public class EvaluateDataFrameResponse implements ToXContentObject { - - public static EvaluateDataFrameResponse fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() == null) { - parser.nextToken(); - } - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - String evaluationName = parser.currentName(); - parser.nextToken(); - Map metrics = parser.map(LinkedHashMap::new, p -> parseMetric(evaluationName, p)); - List knownMetrics = metrics.values() - .stream() - .filter(Objects::nonNull) // Filter out null values returned by {@link EvaluateDataFrameResponse::parseMetric}. 
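// A minimal usage sketch for the EvaluateDataFrameRequest removed above, assuming a
// RestHighLevelClient and an already-built Evaluation (e.g. an outlier detection or
// regression evaluation constructed elsewhere); the index name and query are hypothetical.
import java.io.IOException;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.index.query.QueryBuilders;

class EvaluateDataFrameExample {
    static EvaluateDataFrameResponse evaluate(RestHighLevelClient client, Evaluation evaluation) throws IOException {
        EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(
            "my-results-index",                             // index holding the analytics results
            new QueryConfig(QueryBuilders.matchAllQuery()), // optional filter over the documents
            evaluation);
        return client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
    }
}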
- .collect(Collectors.toList()); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return new EvaluateDataFrameResponse(evaluationName, knownMetrics); - } - - private static EvaluationMetric.Result parseMetric(String evaluationName, XContentParser parser) throws IOException { - String metricName = parser.currentName(); - try { - return parser.namedObject(EvaluationMetric.Result.class, registeredMetricName(evaluationName, metricName), null); - } catch (NamedObjectNotFoundException e) { - parser.skipChildren(); - // Metric name not recognized. Return {@code null} value here and filter it out later. - return null; - } - } - - private final String evaluationName; - private final Map metrics; - - public EvaluateDataFrameResponse(String evaluationName, List metrics) { - this.evaluationName = Objects.requireNonNull(evaluationName); - this.metrics = Objects.requireNonNull(metrics).stream().collect(Collectors.toUnmodifiableMap(m -> m.getMetricName(), m -> m)); - } - - public String getEvaluationName() { - return evaluationName; - } - - public List getMetrics() { - return metrics.values().stream().collect(Collectors.toList()); - } - - @SuppressWarnings("unchecked") - public T getMetricByName(String metricName) { - Objects.requireNonNull(metricName); - return (T) metrics.get(metricName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.startObject().field(evaluationName, metrics).endObject(); - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameResponse that = (EvaluateDataFrameResponse) o; - return Objects.equals(evaluationName, that.evaluationName) && Objects.equals(metrics, that.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(evaluationName, metrics); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java deleted file mode 100644 index b9df8faacdda8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.core.Nullable; - -import java.util.Objects; - -/** - * Request to explain the following about a data frame analytics job: - *

- * <ul>
- *     <li>field selection: which fields are included or are not in the analysis</li>
- *     <li>memory estimation: how much memory the job is estimated to require</li>
- * </ul>
- */ -public class ExplainDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private final DataFrameAnalyticsConfig config; - - public ExplainDataFrameAnalyticsRequest(String id) { - this.id = Objects.requireNonNull(id); - this.config = null; - } - - public ExplainDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) { - this.id = null; - this.config = Objects.requireNonNull(config); - } - - @Nullable - public String getId() { - return id; - } - - @Nullable - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ExplainDataFrameAnalyticsRequest other = (ExplainDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(id, config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java deleted file mode 100644 index d9e15f8ff7031..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.explain.FieldSelection; -import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ExplainDataFrameAnalyticsResponse implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("explain_data_frame_analytics_response"); - - public static final ParseField FIELD_SELECTION = new ParseField("field_selection"); - public static final ParseField MEMORY_ESTIMATION = new ParseField("memory_estimation"); - - public static ExplainDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - args -> new ExplainDataFrameAnalyticsResponse((List) args[0], (MemoryEstimation) args[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), FieldSelection.PARSER, FIELD_SELECTION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), MemoryEstimation.PARSER, MEMORY_ESTIMATION); - } - - private final List fieldSelection; - private final MemoryEstimation memoryEstimation; - - public ExplainDataFrameAnalyticsResponse(List fieldSelection, MemoryEstimation memoryEstimation) { - this.fieldSelection = Objects.requireNonNull(fieldSelection); - this.memoryEstimation = 
Objects.requireNonNull(memoryEstimation); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FIELD_SELECTION.getPreferredName(), fieldSelection); - builder.field(MEMORY_ESTIMATION.getPreferredName(), memoryEstimation); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) return true; - if (other == null || getClass() != other.getClass()) return false; - - ExplainDataFrameAnalyticsResponse that = (ExplainDataFrameAnalyticsResponse) other; - return Objects.equals(fieldSelection, that.fieldSelection) && Objects.equals(memoryEstimation, that.memoryEstimation); - } - - @Override - public int hashCode() { - return Objects.hash(fieldSelection, memoryEstimation); - } - - public MemoryEstimation getMemoryEstimation() { - return memoryEstimation; - } - - public List getFieldSelection() { - return fieldSelection; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java deleted file mode 100644 index a19f787a6458f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request object to flush a given Machine Learning job. 
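// A minimal usage sketch for the ExplainDataFrameAnalyticsRequest/Response pair
// removed above, assuming a RestHighLevelClient; the job id is hypothetical.
import java.io.IOException;
import java.util.List;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.dataframe.explain.FieldSelection;
import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation;

class ExplainDataFrameAnalyticsExample {
    static void explain(RestHighLevelClient client) throws IOException {
        // This constructor references an existing job by id; the other one takes an inline config instead.
        ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest("my-dfa-job");
        ExplainDataFrameAnalyticsResponse response =
            client.machineLearning().explainDataFrameAnalytics(request, RequestOptions.DEFAULT);
        List<FieldSelection> fieldSelection = response.getFieldSelection(); // which fields would be analyzed, and why
        MemoryEstimation memoryEstimation = response.getMemoryEstimation(); // expected memory with/without disk spill
    }
}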
- */ -public class FlushJobRequest implements Validatable, ToXContentObject { - - public static final ParseField CALC_INTERIM = new ParseField("calc_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ADVANCE_TIME = new ParseField("advance_time"); - public static final ParseField SKIP_TIME = new ParseField("skip_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_request", - (a) -> new FlushJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM); - PARSER.declareString(FlushJobRequest::setStart, START); - PARSER.declareString(FlushJobRequest::setEnd, END); - PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME); - PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME); - } - - private final String jobId; - private Boolean calcInterim; - private String start; - private String end; - private String advanceTime; - private String skipTime; - - /** - * Create new Flush job request - * - * @param jobId The job ID of the job to flush - */ - public FlushJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public boolean getCalcInterim() { - return calcInterim; - } - - /** - * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period. - * - * @param calcInterim defaults to {@code false}. - */ - public void setCalcInterim(boolean calcInterim) { - this.calcInterim = calcInterim; - } - - public String getStart() { - return start; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, - * specifies the start of the range of buckets on which to calculate interim results. - * - * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range - * of buckets on which to calculate interim results - * - * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getAdvanceTime() { - return advanceTime; - } - - /** - * Specifies to advance to a particular time value. - * Results are generated and the model is updated for data from the specified time interval. - * - * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setAdvanceTime(String advanceTime) { - this.advanceTime = advanceTime; - } - - public String getSkipTime() { - return skipTime; - } - - /** - * Specifies to skip to a particular time value. - * Results are not generated and the model is not updated for data from the specified time interval. 
- * - * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setSkipTime(String skipTime) { - this.skipTime = skipTime; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - FlushJobRequest other = (FlushJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && calcInterim == other.calcInterim - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(advanceTime, other.advanceTime) - && Objects.equals(skipTime, other.skipTime); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (calcInterim != null) { - builder.field(CALC_INTERIM.getPreferredName(), calcInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (advanceTime != null) { - builder.field(ADVANCE_TIME.getPreferredName(), advanceTime); - } - if (skipTime != null) { - builder.field(SKIP_TIME.getPreferredName(), skipTime); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java deleted file mode 100644 index d85ec888b61a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Response object containing flush acknowledgement and additional data - */ -public class FlushJobResponse implements ToXContentObject { - - public static final ParseField FLUSHED = new ParseField("flushed"); - public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_response", - true, - (a) -> { - boolean flushed = (boolean) a[0]; - Date date = a[1] == null ? 
null : new Date((long) a[1]); - return new FlushJobResponse(flushed, date); - } - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END); - } - - public static FlushJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean flushed; - private final Date lastFinalizedBucketEnd; - - public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) { - this.flushed = flushed; - this.lastFinalizedBucketEnd = lastFinalizedBucketEnd; - } - - /** - * Was the job successfully flushed or not - */ - public boolean isFlushed() { - return flushed; - } - - /** - * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed. - */ - @Nullable - public Date getLastFinalizedBucketEnd() { - return lastFinalizedBucketEnd; - } - - @Override - public int hashCode() { - return Objects.hash(flushed, lastFinalizedBucketEnd); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - FlushJobResponse that = (FlushJobResponse) other; - return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FLUSHED.getPreferredName(), flushed); - if (lastFinalizedBucketEnd != null) { - builder.timeField( - LAST_FINALIZED_BUCKET_END.getPreferredName(), - LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", - lastFinalizedBucketEnd.getTime() - ); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java deleted file mode 100644 index 2a7f09c802dc3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
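// A minimal usage sketch for the FlushJobRequest/Response pair removed above,
// assuming a RestHighLevelClient; the job id and time range are hypothetical.
import java.io.IOException;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;

class FlushJobExample {
    static void flush(RestHighLevelClient client) throws IOException {
        FlushJobRequest request = new FlushJobRequest("my-job");
        request.setCalcInterim(true);   // also calculate interim results for the latest buckets
        request.setStart("1403481600"); // epoch seconds; epoch millis and ISO strings are accepted too
        request.setEnd("1403485200");
        FlushJobResponse response = client.machineLearning().flushJob(request, RequestOptions.DEFAULT);
        if (response.isFlushed() && response.getLastFinalizedBucketEnd() != null) {
            System.out.println("Flushed up to " + response.getLastFinalizedBucketEnd());
        }
    }
}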
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Pojo for forecasting an existing and open Machine Learning Job - */ -public class ForecastJobRequest implements Validatable, ToXContentObject { - - public static final ParseField DURATION = new ParseField("duration"); - public static final ParseField EXPIRES_IN = new ParseField("expires_in"); - public static final ParseField MAX_MODEL_MEMORY = new ParseField("max_model_memory"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_request", - (a) -> new ForecastJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString((request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION); - PARSER.declareString( - (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), - EXPIRES_IN - ); - PARSER.declareField(ForecastJobRequest::setMaxModelMemory, (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MAX_MODEL_MEMORY.getPreferredName()); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new ByteSizeValue(p.longValue()); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, MAX_MODEL_MEMORY, ObjectParser.ValueType.VALUE); - } - - private final String jobId; - private TimeValue duration; - private TimeValue expiresIn; - private ByteSizeValue maxModelMemory; - - /** - * A new forecast request - * - * @param jobId the non-null, existing, and opened jobId to forecast - */ - public ForecastJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getDuration() { - return duration; - } - - /** - * Set the forecast duration - * - * A period of time that indicates how far into the future to forecast. - * The default value is 1 day. The forecast starts at the last record that was processed. - * - * @param duration TimeValue for the duration of the forecast - */ - public void setDuration(TimeValue duration) { - this.duration = duration; - } - - public TimeValue getExpiresIn() { - return expiresIn; - } - - /** - * Set the forecast expiration - * - * The period of time that forecast results are retained. - * After a forecast expires, the results are deleted. The default value is 14 days. - * If set to a value of 0, the forecast is never automatically deleted. - * - * @param expiresIn TimeValue for the forecast expiration - */ - public void setExpiresIn(TimeValue expiresIn) { - this.expiresIn = expiresIn; - } - - public ByteSizeValue getMaxModelMemory() { - return maxModelMemory; - } - - /** - * Set the amount of memory allowed to be used by this forecast. 
- * - * If the projected forecast memory usage exceeds this amount, the forecast will spool results to disk to keep within the limits. - * @param maxModelMemory A byte sized value less than 500MB and less than 40% of the associated job's configured memory usage. - * Defaults to 20MB. - */ - public ForecastJobRequest setMaxModelMemory(ByteSizeValue maxModelMemory) { - this.maxModelMemory = maxModelMemory; - return this; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, duration, expiresIn, maxModelMemory); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobRequest other = (ForecastJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(duration, other.duration) - && Objects.equals(expiresIn, other.expiresIn) - && Objects.equals(maxModelMemory, other.maxModelMemory); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (duration != null) { - builder.field(DURATION.getPreferredName(), duration.getStringRep()); - } - if (expiresIn != null) { - builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep()); - } - if (maxModelMemory != null) { - builder.field(MAX_MODEL_MEMORY.getPreferredName(), maxModelMemory.getStringRep()); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java deleted file mode 100644 index b1cbd5d863c99..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Forecast response object - */ -public class ForecastJobResponse implements ToXContentObject { - - public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - public static final ParseField FORECAST_ID = new ParseField("forecast_id"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_response", - true, - (a) -> new ForecastJobResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID); - } - - public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean acknowledged; - private final String forecastId; - - public ForecastJobResponse(boolean acknowledged, String forecastId) { - this.acknowledged = acknowledged; - this.forecastId = forecastId; - } - - /** - * Forecast creating acknowledgement - * @return {@code true} indicates success, {@code false} otherwise - */ - public boolean isAcknowledged() { - return acknowledged; - } - - /** - * The created forecast ID - */ - public String getForecastId() { - return forecastId; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, forecastId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobResponse other = (ForecastJobResponse) obj; - return Objects.equals(acknowledged, other.acknowledged) && Objects.equals(forecastId, other.forecastId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - builder.field(FORECAST_ID.getPreferredName(), forecastId); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java deleted file mode 100644 index 052f177bb8743..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
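// A minimal usage sketch for the ForecastJobRequest/Response pair removed above,
// assuming a RestHighLevelClient; the job id and limits are hypothetical.
import java.io.IOException;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.TimeValue;

class ForecastJobExample {
    static String forecast(RestHighLevelClient client) throws IOException {
        ForecastJobRequest request = new ForecastJobRequest("my-job");
        request.setDuration(TimeValue.timeValueHours(48));                 // how far ahead to forecast
        request.setExpiresIn(TimeValue.timeValueHours(72));                // how long to retain the results
        request.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // spool to disk beyond this limit
        ForecastJobResponse response = client.machineLearning().forecastJob(request, RequestOptions.DEFAULT);
        return response.getForecastId(); // use this id to fetch the forecast results later
    }
}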
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve buckets of a given job - */ -public class GetBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField EXPAND = new ParseField("expand"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP); - PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND); - PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END); - PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareString(GetBucketsRequest::setSort, SORT); - PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING); - } - - private String jobId; - private String timestamp; - private Boolean expand; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double anomalyScore; - private String sort; - private Boolean descending; - - private GetBucketsRequest() {} - - /** - * Constructs a request to retrieve buckets of a given job - * @param jobId id of the job to retrieve buckets of - */ - public GetBucketsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - /** - * Sets the timestamp of a specific bucket to be retrieved. - * @param timestamp String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setTimestamp(String timestamp) { - this.timestamp = timestamp; - } - - public String getTimestamp() { - return timestamp; - } - - public boolean isExpand() { - return expand; - } - - /** - * Sets the value of "expand". - * When {@code true}, buckets will be expanded to include their records. - * @param expand value of "expand" to be set - */ - public void setExpand(Boolean expand) { - this.expand = expand; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim buckets will be filtered out. 
- * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getAnomalyScore() { - return anomalyScore; - } - - /** - * Sets the value of "anomaly_score". - * Only buckets with "anomaly_score" equal or greater will be returned. - * @param anomalyScore value of "anomaly_score". - */ - public void setAnomalyScore(Double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the bucket field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. 
- * @param descending value of "desc" - */ - public void setDescending(boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(Result.TIMESTAMP.getPreferredName(), timestamp); - } - if (expand != null) { - builder.field(EXPAND.getPreferredName(), expand); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (anomalyScore != null) { - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsRequest other = (GetBucketsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(expand, other.expand) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(anomalyScore, other.anomalyScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java deleted file mode 100644 index ced8ad201adbc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.Bucket; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested buckets - */ -public class GetBucketsResponse extends AbstractResultResponse { - - public static final ParseField BUCKETS = new ParseField("buckets"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_buckets_response", - true, - a -> new GetBucketsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetBucketsResponse(List buckets, long count) { - super(BUCKETS, buckets, count); - } - - /** - * The retrieved buckets - * @return the retrieved buckets - */ - public List buckets() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsResponse other = (GetBucketsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java deleted file mode 100644 index 91bc1f0b537ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
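// A minimal usage sketch for the GetBucketsRequest/Response pair removed above,
// assuming a RestHighLevelClient; the job id and thresholds are hypothetical.
import java.io.IOException;
import java.util.List;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.job.results.Bucket;

class GetBucketsExample {
    static List<Bucket> topBuckets(RestHighLevelClient client) throws IOException {
        GetBucketsRequest request = new GetBucketsRequest("my-job");
        request.setAnomalyScore(75.0);                 // only buckets scoring at least 75
        request.setExcludeInterim(true);               // filter out interim buckets
        request.setSort("anomaly_score");              // sort on the score field
        request.setDescending(true);
        request.setPageParams(new PageParams(0, 100)); // from=0, size=100
        GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
        return response.buckets();
    }
}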
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Get the Scheduled Events for a Calendar - */ -public class GetCalendarEventsRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_calendar_events_request", - a -> new GetCalendarEventsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(GetCalendarEventsRequest::setStart, START); - PARSER.declareString(GetCalendarEventsRequest::setEnd, END); - PARSER.declareString(GetCalendarEventsRequest::setJobId, Job.ID); - PARSER.declareObject(GetCalendarEventsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String calendarId; - private String start; - private String end; - private String jobId; - private PageParams pageParams; - - /** - * Create a new request to get the ScheduledEvents for the given calendarId. - * - * @param calendarId The ID of the calendar. - * Can be `_all` to get ALL ScheduledEvents for all calendars. - */ - public GetCalendarEventsRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - } - - public String getCalendarId() { - return calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * The paging parameters for the gathered ScheduledEvents - * @param pageParams The desired paging params - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Specifies to get events with timestamps after this time. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Specifies to get events with timestamps earlier than this time. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId for which to get the ScheduledEvents. When this option is used calendarId must be `_all` - * @param jobId The job for which to get the events. 
- */ - public void setJobId(String jobId) { - this.jobId = jobId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (jobId != null) { - builder.field(Job.ID.getPreferredName(), jobId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, start, end, jobId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - GetCalendarEventsRequest other = (GetCalendarEventsRequest) obj; - return Objects.equals(calendarId, other.calendarId) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(jobId, other.jobId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java deleted file mode 100644 index 2c36c1c329e84..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link ScheduledEvent} objects and the total count found - */ -public class GetCalendarEventsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendar_events_response", - true, - a -> new GetCalendarEventsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), ScheduledEvent.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetCalendarEventsResponse(List events, long count) { - super(RESULTS_FIELD, events, count); - } - - /** - * The collection of {@link ScheduledEvent} objects found in the query - */ - public List events() { - return results; - } - - public static GetCalendarEventsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarEventsResponse other = (GetCalendarEventsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java deleted file mode 100644 index 55bcbd88964be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
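// A minimal usage sketch for the GetCalendarEventsRequest/Response pair removed
// above, assuming a RestHighLevelClient; the calendar id and dates are hypothetical.
import java.io.IOException;
import java.util.List;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;

class GetCalendarEventsExample {
    static List<ScheduledEvent> events(RestHighLevelClient client) throws IOException {
        GetCalendarEventsRequest request = new GetCalendarEventsRequest("my-calendar"); // or "_all"
        request.setStart("2021-01-01T00:00:00Z"); // events on or after this time
        request.setEnd("2021-02-01T00:00:00Z");   // events before this time
        GetCalendarEventsResponse response =
            client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT);
        return response.events();
    }
}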
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class GetCalendarsRequest implements Validatable, ToXContentObject { - - public static final ObjectParser PARSER = new ObjectParser<>( - "get_calendars_request", - GetCalendarsRequest::new - ); - - static { - PARSER.declareString(GetCalendarsRequest::setCalendarId, Calendar.ID); - PARSER.declareObject(GetCalendarsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private String calendarId; - private PageParams pageParams; - - public GetCalendarsRequest() {} - - public GetCalendarsRequest(String calendarId) { - this.calendarId = calendarId; - } - - public String getCalendarId() { - return calendarId; - } - - public void setCalendarId(String calendarId) { - this.calendarId = calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (calendarId != null) { - builder.field(Calendar.ID.getPreferredName(), calendarId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCalendarsRequest other = (GetCalendarsRequest) obj; - return Objects.equals(calendarId, other.calendarId) && Objects.equals(pageParams, other.pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java deleted file mode 100644 index bf2119692b485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetCalendarsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("calendars"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendars_response", - true, - a -> new GetCalendarsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), Calendar.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - public static GetCalendarsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetCalendarsResponse(List calendars, long count) { - super(RESULTS_FIELD, calendars, count); - } - - /** - * The collection of {@link Calendar} objects found in the query - */ - public List calendars() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarsResponse other = (GetCalendarsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java deleted file mode 100644 index 9a00c09ffd847..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
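// A minimal usage sketch for the GetCalendarsRequest/Response pair removed above,
// assuming a RestHighLevelClient; the paging values are hypothetical.
import java.io.IOException;
import java.util.List;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.calendars.Calendar;

class GetCalendarsExample {
    static List<Calendar> calendars(RestHighLevelClient client) throws IOException {
        GetCalendarsRequest request = new GetCalendarsRequest(); // no id set: match all calendars
        request.setPageParams(new PageParams(0, 20));
        GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
        return response.calendars();
    }
}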
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.client.ml.job.results.CategoryDefinition;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * A request to retrieve categories of a given job
- */
-public class GetCategoriesRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField CATEGORY_ID = CategoryDefinition.CATEGORY_ID;
-    public static final ParseField PARTITION_FIELD_VALUE = CategoryDefinition.PARTITION_FIELD_VALUE;
-
-    public static final ConstructingObjectParser<GetCategoriesRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "get_categories_request",
-        a -> new GetCategoriesRequest((String) a[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareLong(GetCategoriesRequest::setCategoryId, CATEGORY_ID);
-        PARSER.declareObject(GetCategoriesRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
-        PARSER.declareString(GetCategoriesRequest::setPartitionFieldValue, PARTITION_FIELD_VALUE);
-    }
-
-    private final String jobId;
-    private Long categoryId;
-    private PageParams pageParams;
-    private String partitionFieldValue;
-
-    /**
-     * Constructs a request to retrieve category information from a given job
-     * @param jobId id of the job from which to retrieve results
-     */
-    public GetCategoriesRequest(String jobId) {
-        this.jobId = Objects.requireNonNull(jobId);
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    public PageParams getPageParams() {
-        return pageParams;
-    }
-
-    public Long getCategoryId() {
-        return categoryId;
-    }
-
-    /**
-     * Sets the category id
-     * @param categoryId the category id
-     */
-    public void setCategoryId(Long categoryId) {
-        this.categoryId = categoryId;
-    }
-
-    /**
-     * Sets the paging parameters
-     * @param pageParams the paging parameters
-     */
-    public void setPageParams(PageParams pageParams) {
-        this.pageParams = pageParams;
-    }
-
-    public String getPartitionFieldValue() {
-        return partitionFieldValue;
-    }
-
-    /**
-     * Sets the partition field value
-     * @param partitionFieldValue the partition field value
-     */
-    public void setPartitionFieldValue(String partitionFieldValue) {
-        this.partitionFieldValue = partitionFieldValue;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(Job.ID.getPreferredName(), jobId);
-        if (categoryId != null) {
-            builder.field(CATEGORY_ID.getPreferredName(), categoryId);
-        }
-        if (pageParams != null) {
-            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
-        }
-        if (partitionFieldValue != null) {
-            builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetCategoriesRequest request = (GetCategoriesRequest) obj;
-        return Objects.equals(jobId, request.jobId)
-            && Objects.equals(categoryId, request.categoryId)
-            && Objects.equals(pageParams, request.pageParams)
-            && Objects.equals(partitionFieldValue, request.partitionFieldValue);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobId, categoryId, pageParams, partitionFieldValue);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java
deleted file mode 100644
index d1e542d18e43d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.CategoryDefinition;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested categories
- */
-public class GetCategoriesResponse extends AbstractResultResponse<CategoryDefinition> {
-
-    public static final ParseField CATEGORIES = new ParseField("categories");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetCategoriesResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_categories_response",
-        true,
-        a -> new GetCategoriesResponse((List<CategoryDefinition>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CategoryDefinition.PARSER, CATEGORIES);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetCategoriesResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetCategoriesResponse(List<CategoryDefinition> categories, long count) {
-        super(CATEGORIES, categories, count);
-    }
-
-    /**
-     * The retrieved categories
-     * @return the retrieved categories
-     */
-    public List<CategoryDefinition> categories() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetCategoriesResponse other = (GetCategoriesResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java
deleted file mode 100644
index 9b19280f9729f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.core.Nullable;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-
-public class GetDataFrameAnalyticsRequest implements Validatable {
-
-    public static final String ALLOW_NO_MATCH = "allow_no_match";
-    public static final String EXCLUDE_GENERATED = "exclude_generated";
-
-    private final List<String> ids;
-    private Boolean allowNoMatch;
-    private PageParams pageParams;
-    private Boolean excludeGenerated;
-
-    /**
-     * Helper method to create a request that will get ALL Data Frame Analytics
-     * @return new {@link GetDataFrameAnalyticsRequest} object for the id "_all"
-     */
-    public static GetDataFrameAnalyticsRequest getAllDataFrameAnalyticsRequest() {
-        return new GetDataFrameAnalyticsRequest("_all");
-    }
-
-    public GetDataFrameAnalyticsRequest(String... ids) {
-        this.ids = Arrays.asList(ids);
-    }
-
-    public List<String> getIds() {
-        return ids;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return allowNoMatch;
-    }
-
-    /**
-     * Setting this flag to `true` removes certain fields from the configuration on retrieval.
-     *
-     * This is useful when getting the configuration and wanting to put it in another cluster.
-     *
-     * Default value is false.
-     * @param excludeGenerated Boolean value indicating if certain fields should be removed
-     */
-    public void setExcludeGenerated(boolean excludeGenerated) {
-        this.excludeGenerated = excludeGenerated;
-    }
-
-    public Boolean getExcludeGenerated() {
-        return excludeGenerated;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no data frame analytics.
-     *
-     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
-     *                     does not match any data frame analytics
-     */
-    public GetDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-        return this;
-    }
-
-    public PageParams getPageParams() {
-        return pageParams;
-    }
-
-    public GetDataFrameAnalyticsRequest setPageParams(@Nullable PageParams pageParams) {
-        this.pageParams = pageParams;
-        return this;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (ids == null || ids.isEmpty()) {
-            return Optional.of(ValidationException.withError("data frame analytics id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetDataFrameAnalyticsRequest other = (GetDataFrameAnalyticsRequest) o;
-        return Objects.equals(ids, other.ids)
-            && Objects.equals(allowNoMatch, other.allowNoMatch)
-            && Objects.equals(excludeGenerated, other.excludeGenerated)
-            && Objects.equals(pageParams, other.pageParams);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(ids, allowNoMatch, excludeGenerated, pageParams);
-    }
-}
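As context for the removal above, a minimal sketch of how this request class was typically driven through the high-level REST client. The `client` handle (a RestHighLevelClient) and the `machineLearning().getDataFrameAnalytics(...)` entry point are assumed from the HLRC's usual conventions, not taken from this patch:

    // Sketch only: list up to 100 data frame analytics configs, tolerating
    // wildcards that match nothing. Both setters return the request itself,
    // so they can be chained (see setAllowNoMatch/setPageParams above).
    GetDataFrameAnalyticsRequest request = GetDataFrameAnalyticsRequest.getAllDataFrameAnalyticsRequest()
        .setAllowNoMatch(true)
        .setPageParams(new PageParams(0, 100));
    GetDataFrameAnalyticsResponse response = client.machineLearning()
        .getDataFrameAnalytics(request, RequestOptions.DEFAULT);
    List<DataFrameAnalyticsConfig> configs = response.getAnalytics();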
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java
deleted file mode 100644
index 8e991efa10d44..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class GetDataFrameAnalyticsResponse {
-
-    public static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics");
-
-    @SuppressWarnings("unchecked")
-    static final ConstructingObjectParser<GetDataFrameAnalyticsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_data_frame_analytics",
-        true,
-        args -> new GetDataFrameAnalyticsResponse((List<DataFrameAnalyticsConfig>) args[0])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsConfig.fromXContent(p), DATA_FRAME_ANALYTICS);
-    }
-
-    public static GetDataFrameAnalyticsResponse fromXContent(final XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private List<DataFrameAnalyticsConfig> analytics;
-
-    public GetDataFrameAnalyticsResponse(List<DataFrameAnalyticsConfig> analytics) {
-        this.analytics = analytics;
-    }
-
-    public List<DataFrameAnalyticsConfig> getAnalytics() {
-        return analytics;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetDataFrameAnalyticsResponse other = (GetDataFrameAnalyticsResponse) o;
-        return Objects.equals(this.analytics, other.analytics);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(analytics);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java
deleted file mode 100644
index 50a71f5a4dc33..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ParseField;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-
-/**
- * Request to get data frame analytics stats
- */
-public class GetDataFrameAnalyticsStatsRequest implements Validatable {
-
-    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
-
-    private final List<String> ids;
-    private Boolean allowNoMatch;
-    private PageParams pageParams;
-
-    public GetDataFrameAnalyticsStatsRequest(String... ids) {
-        this.ids = Arrays.asList(ids);
-    }
-
-    public List<String> getIds() {
-        return ids;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return allowNoMatch;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no data frame analytics.
-     *
-     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
-     *                     does not match any data frame analytics
-     */
-    public GetDataFrameAnalyticsStatsRequest setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-        return this;
-    }
-
-    public PageParams getPageParams() {
-        return pageParams;
-    }
-
-    public GetDataFrameAnalyticsStatsRequest setPageParams(@Nullable PageParams pageParams) {
-        this.pageParams = pageParams;
-        return this;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (ids == null || ids.isEmpty()) {
-            return Optional.of(ValidationException.withError("data frame analytics id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetDataFrameAnalyticsStatsRequest other = (GetDataFrameAnalyticsStatsRequest) o;
-        return Objects.equals(ids, other.ids)
-            && Objects.equals(allowNoMatch, other.allowNoMatch)
-            && Objects.equals(pageParams, other.pageParams);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(ids, allowNoMatch, pageParams);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java
deleted file mode 100644
index 00284b0802a16..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.TaskOperationFailure;
-import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
-import org.elasticsearch.client.transform.AcknowledgedTasksResponse;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class GetDataFrameAnalyticsStatsResponse {
-
-    public static GetDataFrameAnalyticsStatsResponse fromXContent(XContentParser parser) {
-        return GetDataFrameAnalyticsStatsResponse.PARSER.apply(parser, null);
-    }
-
-    private static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<GetDataFrameAnalyticsStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_data_frame_analytics_stats_response",
-        true,
-        args -> new GetDataFrameAnalyticsStatsResponse(
-            (List<DataFrameAnalyticsStats>) args[0],
-            (List<TaskOperationFailure>) args[1],
-            (List<ElasticsearchException>) args[2]
-        )
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsStats.fromXContent(p), DATA_FRAME_ANALYTICS);
-        PARSER.declareObjectArray(
-            optionalConstructorArg(),
-            (p, c) -> TaskOperationFailure.fromXContent(p),
-            AcknowledgedTasksResponse.TASK_FAILURES
-        );
-        PARSER.declareObjectArray(
-            optionalConstructorArg(),
-            (p, c) -> ElasticsearchException.fromXContent(p),
-            AcknowledgedTasksResponse.NODE_FAILURES
-        );
-    }
-
-    private final List<DataFrameAnalyticsStats> analyticsStats;
-    private final List<TaskOperationFailure> taskFailures;
-    private final List<ElasticsearchException> nodeFailures;
-
-    public GetDataFrameAnalyticsStatsResponse(
-        List<DataFrameAnalyticsStats> analyticsStats,
-        @Nullable List<TaskOperationFailure> taskFailures,
-        @Nullable List<ElasticsearchException> nodeFailures
-    ) {
-        this.analyticsStats = analyticsStats;
-        this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures);
-        this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
-    }
-
-    public List<DataFrameAnalyticsStats> getAnalyticsStats() {
-        return analyticsStats;
-    }
-
-    public List<ElasticsearchException> getNodeFailures() {
-        return nodeFailures;
-    }
-
-    public List<TaskOperationFailure> getTaskFailures() {
-        return taskFailures;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetDataFrameAnalyticsStatsResponse other = (GetDataFrameAnalyticsStatsResponse) o;
-        return Objects.equals(analyticsStats, other.analyticsStats)
-            && Objects.equals(nodeFailures, other.nodeFailures)
-            && Objects.equals(taskFailures, other.taskFailures);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(analyticsStats, nodeFailures, taskFailures);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java
deleted file mode 100644
index 67fa0503a9b77..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request object to get {@link DatafeedConfig} objects with the matching {@code datafeedId}s.
- *
- * {@code _all} explicitly gets all the datafeeds in the cluster
- * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster
- */
-public class GetDatafeedRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids");
-    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
-    public static final String EXCLUDE_GENERATED = "exclude_generated";
-
-    private static final String ALL_DATAFEEDS = "_all";
-    private final List<String> datafeedIds;
-    private Boolean allowNoMatch;
-    private Boolean excludeGenerated;
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_request",
-        true,
-        a -> new GetDatafeedRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0])
-    );
-
-    static {
-        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), DATAFEED_IDS);
-        PARSER.declareBoolean(GetDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH);
-    }
-
-    /**
-     * Helper method to create a query that will get ALL datafeeds
-     * @return new {@link GetDatafeedRequest} object searching for the datafeedId "_all"
-     */
-    public static GetDatafeedRequest getAllDatafeedsRequest() {
-        return new GetDatafeedRequest(ALL_DATAFEEDS);
-    }
-
-    /**
-     * Get the specified {@link DatafeedConfig} configurations via their unique datafeedIds
-     * @param datafeedIds must not contain any null values
-     */
-    public GetDatafeedRequest(String... datafeedIds) {
-        this(Arrays.asList(datafeedIds));
-    }
-
-    GetDatafeedRequest(List<String> datafeedIds) {
-        if (datafeedIds.stream().anyMatch(Objects::isNull)) {
-            throw new NullPointerException("datafeedIds must not contain null values");
-        }
-        this.datafeedIds = new ArrayList<>(datafeedIds);
-    }
-
-    /**
-     * All the datafeedIds for which to get configuration information
-     */
-    public List<String> getDatafeedIds() {
-        return datafeedIds;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no datafeeds.
-     *
-     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
-     *                     does not match any datafeeds
-     */
-    public void setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return allowNoMatch;
-    }
-
-    /**
-     * Setting this flag to `true` removes certain fields from the configuration on retrieval.
-     *
-     * This is useful when getting the configuration and wanting to put it in another cluster.
-     *
-     * Default value is false.
-     * @param excludeGenerated Boolean value indicating if certain fields should be removed
-     */
-    public void setExcludeGenerated(boolean excludeGenerated) {
-        this.excludeGenerated = excludeGenerated;
-    }
-
-    public Boolean getExcludeGenerated() {
-        return excludeGenerated;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(datafeedIds, excludeGenerated, allowNoMatch);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || other.getClass() != getClass()) {
-            return false;
-        }
-
-        GetDatafeedRequest that = (GetDatafeedRequest) other;
-        return Objects.equals(datafeedIds, that.datafeedIds)
-            && Objects.equals(allowNoMatch, that.allowNoMatch)
-            && Objects.equals(excludeGenerated, that.excludeGenerated);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-
-        if (datafeedIds.isEmpty() == false) {
-            builder.stringListField(DATAFEED_IDS.getPreferredName(), datafeedIds);
-        }
-
-        if (allowNoMatch != null) {
-            builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch);
-        }
-
-        builder.endObject();
-        return builder;
-    }
-}
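A hedged sketch of the excludeGenerated workflow that the javadoc above describes (retrieving datafeed configs in a form suitable for re-creating them on another cluster). The `client` handle and the `machineLearning().getDatafeed(...)` call follow the usual HLRC shape and are assumptions, not part of this patch:

    // Sketch only: fetch datafeed configs stripped of generated fields so the
    // result can be PUT into another cluster; fail loudly if an id matches nothing.
    GetDatafeedRequest request = new GetDatafeedRequest("datafeed-1", "datafeed-2");
    request.setAllowNoMatch(false);
    request.setExcludeGenerated(true);
    GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
    List<DatafeedConfig> datafeeds = response.datafeeds();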
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java
deleted file mode 100644
index b8a6a0d79972a..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link DatafeedConfig} objects and the total count found
- */
-public class GetDatafeedResponse extends AbstractResultResponse<DatafeedConfig> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_response",
-        true,
-        a -> new GetDatafeedResponse((List<DatafeedConfig.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), DatafeedConfig.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetDatafeedResponse(List<DatafeedConfig.Builder> datafeedBuilders, long count) {
-        super(RESULTS_FIELD, datafeedBuilders.stream().map(DatafeedConfig.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link DatafeedConfig} objects found in the query
-     */
-    public List<DatafeedConfig> datafeeds() {
-        return results;
-    }
-
-    public static GetDatafeedResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetDatafeedResponse other = (GetDatafeedResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
deleted file mode 100644
index 081504354eb20..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds
- *
- * {@code _all} explicitly gets all the datafeeds' statistics in the cluster
- * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster
- */
-public class GetDatafeedStatsRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_stats_request",
-        a -> new GetDatafeedStatsRequest((List<String>) a[0])
-    );
-
-    static {
-        PARSER.declareField(
-            ConstructingObjectParser.constructorArg(),
-            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
-            DatafeedConfig.ID,
-            ObjectParser.ValueType.STRING_ARRAY
-        );
-        PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH);
-    }
-
-    private static final String ALL_DATAFEEDS = "_all";
-
-    private final List<String> datafeedIds;
-    private Boolean allowNoMatch;
-
-    /**
-     * Explicitly gets all datafeeds statistics
-     *
-     * @return a {@link GetDatafeedStatsRequest} for all existing datafeeds
-     */
-    public static GetDatafeedStatsRequest getAllDatafeedStatsRequest() {
-        return new GetDatafeedStatsRequest(ALL_DATAFEEDS);
-    }
-
-    GetDatafeedStatsRequest(List<String> datafeedIds) {
-        if (datafeedIds.stream().anyMatch(Objects::isNull)) {
-            throw new NullPointerException("datafeedIds must not contain null values");
-        }
-        this.datafeedIds = new ArrayList<>(datafeedIds);
-    }
-
-    /**
-     * Get the specified Datafeed's statistics via their unique datafeedIds
-     *
-     * @param datafeedIds must be non-null and each datafeedId must be non-null
-     */
-    public GetDatafeedStatsRequest(String... datafeedIds) {
-        this(Arrays.asList(datafeedIds));
-    }
-
-    /**
-     * All the datafeedIds for which to get statistics
-     */
-    public List<String> getDatafeedIds() {
-        return datafeedIds;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return this.allowNoMatch;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no datafeeds.
-     *
-     * This includes {@code _all} string or when no datafeeds have been specified
-     *
-     * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true}
-     */
-    public void setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(datafeedIds, allowNoMatch);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other;
-        return Objects.equals(datafeedIds, that.datafeedIds) && Objects.equals(allowNoMatch, that.allowNoMatch);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds));
-        if (allowNoMatch != null) {
-            builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java
deleted file mode 100644
index 94a49fc074c04..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.datafeed.DatafeedStats;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found
- */
-public class GetDatafeedStatsResponse extends AbstractResultResponse<DatafeedStats> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_stats_response",
-        true,
-        a -> new GetDatafeedStatsResponse((List<DatafeedStats>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), COUNT);
-    }
-
-    GetDatafeedStatsResponse(List<DatafeedStats> results, long count) {
-        super(RESULTS_FIELD, results, count);
-    }
-
-    /**
-     * The collection of {@link DatafeedStats} objects found in the query
-     */
-    public List<DatafeedStats> datafeedStats() {
-        return results;
-    }
-
-    public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java
deleted file mode 100644
index cafa4d8b331f5..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.client.ml.job.config.MlFilter;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * A request to retrieve {@link MlFilter}s
- */
-public class GetFiltersRequest implements Validatable, ToXContentObject {
-
-    public static final ObjectParser<GetFiltersRequest, Void> PARSER = new ObjectParser<>("get_filters_request", GetFiltersRequest::new);
-
-    static {
-        PARSER.declareString(GetFiltersRequest::setFilterId, MlFilter.ID);
-        PARSER.declareInt(GetFiltersRequest::setFrom, PageParams.FROM);
-        PARSER.declareInt(GetFiltersRequest::setSize, PageParams.SIZE);
-    }
-
-    private String filterId;
-    private Integer from;
-    private Integer size;
-
-    public String getFilterId() {
-        return filterId;
-    }
-
-    public Integer getFrom() {
-        return from;
-    }
-
-    public Integer getSize() {
-        return size;
-    }
-
-    /**
-     * Sets the filter id
-     * @param filterId the filter id
-     */
-    public void setFilterId(String filterId) {
-        this.filterId = filterId;
-    }
-
-    /**
-     * Sets the number of filters to skip.
-     * @param from set the `from` parameter
-     */
-    public void setFrom(Integer from) {
-        this.from = from;
-    }
-
-    /**
-     * Sets the number of filters to return.
-     * @param size set the `size` parameter
-     */
-    public void setSize(Integer size) {
-        this.size = size;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (filterId != null) {
-            builder.field(MlFilter.ID.getPreferredName(), filterId);
-        }
-        if (from != null) {
-            builder.field(PageParams.FROM.getPreferredName(), from);
-        }
-        if (size != null) {
-            builder.field(PageParams.SIZE.getPreferredName(), size);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetFiltersRequest request = (GetFiltersRequest) obj;
-        return Objects.equals(filterId, request.filterId) && Objects.equals(from, request.from) && Objects.equals(size, request.size);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(filterId, from, size);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java
deleted file mode 100644
index a0a190d89cfc2..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.config.MlFilter;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link MlFilter} objects and the total count found
- */
-public class GetFiltersResponse extends AbstractResultResponse<MlFilter> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("filters");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetFiltersResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_filters_response",
-        true,
-        a -> new GetFiltersResponse((List<MlFilter.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), MlFilter.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetFiltersResponse(List<MlFilter.Builder> filters, long count) {
-        super(RESULTS_FIELD, filters.stream().map(MlFilter.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link MlFilter} objects found in the query
-     */
-    public List<MlFilter> filters() {
-        return results;
-    }
-
-    public static GetFiltersResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetFiltersResponse other = (GetFiltersResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java
deleted file mode 100644
index b174f4c91f1af..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * A request to retrieve influencers of a given job
- */
-public class GetInfluencersRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
-    public static final ParseField START = new ParseField("start");
-    public static final ParseField END = new ParseField("end");
-    public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score");
-    public static final ParseField SORT = new ParseField("sort");
-    public static final ParseField DESCENDING = new ParseField("desc");
-
-    public static final ConstructingObjectParser<GetInfluencersRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "get_influencers_request",
-        a -> new GetInfluencersRequest((String) a[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareBoolean(GetInfluencersRequest::setExcludeInterim, EXCLUDE_INTERIM);
-        PARSER.declareStringOrNull(GetInfluencersRequest::setStart, START);
-        PARSER.declareStringOrNull(GetInfluencersRequest::setEnd, END);
-        PARSER.declareObject(GetInfluencersRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
-        PARSER.declareDouble(GetInfluencersRequest::setInfluencerScore, INFLUENCER_SCORE);
-        PARSER.declareString(GetInfluencersRequest::setSort, SORT);
-        PARSER.declareBoolean(GetInfluencersRequest::setDescending, DESCENDING);
-    }
-
-    private final String jobId;
-    private Boolean excludeInterim;
-    private String start;
-    private String end;
-    private Double influencerScore;
-    private PageParams pageParams;
-    private String sort;
-    private Boolean descending;
-
-    /**
-     * Constructs a request to retrieve influencers of a given job
-     * @param jobId id of the job to retrieve influencers of
-     */
-    public GetInfluencersRequest(String jobId) {
-        this.jobId = Objects.requireNonNull(jobId);
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    public Boolean getExcludeInterim() {
-        return excludeInterim;
-    }
-
-    /**
-     * Sets the value of "exclude_interim".
-     * When {@code true}, interim influencers will be filtered out.
-     * @param excludeInterim value of "exclude_interim" to be set
-     */
-    public void setExcludeInterim(Boolean excludeInterim) {
-        this.excludeInterim = excludeInterim;
-    }
-
-    public String getStart() {
-        return start;
-    }
-
-    /**
-     * Sets the value of "start" which is a timestamp.
-     * Only influencers whose timestamp is on or after the "start" value will be returned.
-     * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
-     */
-    public void setStart(String start) {
-        this.start = start;
-    }
-
-    public String getEnd() {
-        return end;
-    }
-
-    /**
-     * Sets the value of "end" which is a timestamp.
-     * Only influencers whose timestamp is before the "end" value will be returned.
-     * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
-     */
-    public void setEnd(String end) {
-        this.end = end;
-    }
-
-    public PageParams getPageParams() {
-        return pageParams;
-    }
-
-    /**
-     * Sets the paging parameters
-     * @param pageParams The paging parameters
-     */
-    public void setPageParams(PageParams pageParams) {
-        this.pageParams = pageParams;
-    }
-
-    public Double getInfluencerScore() {
-        return influencerScore;
-    }
-
-    /**
-     * Sets the value of "influencer_score".
-     * Only influencers with "influencer_score" equal or greater will be returned.
-     * @param influencerScore value of "influencer_score".
-     */
-    public void setInfluencerScore(Double influencerScore) {
-        this.influencerScore = influencerScore;
-    }
-
-    public String getSort() {
-        return sort;
-    }
-
-    /**
-     * Sets the value of "sort".
-     * Specifies the influencer field to sort on.
-     * @param sort value of "sort".
-     */
-    public void setSort(String sort) {
-        this.sort = sort;
-    }
-
-    public Boolean getDescending() {
-        return descending;
-    }
-
-    /**
-     * Sets the value of "desc".
-     * Specifies the sorting order.
-     * @param descending value of "desc"
-     */
-    public void setDescending(Boolean descending) {
-        this.descending = descending;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(Job.ID.getPreferredName(), jobId);
-        if (excludeInterim != null) {
-            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
-        }
-        if (start != null) {
-            builder.field(START.getPreferredName(), start);
-        }
-        if (end != null) {
-            builder.field(END.getPreferredName(), end);
-        }
-        if (pageParams != null) {
-            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
-        }
-        if (influencerScore != null) {
-            builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore);
-        }
-        if (sort != null) {
-            builder.field(SORT.getPreferredName(), sort);
-        }
-        if (descending != null) {
-            builder.field(DESCENDING.getPreferredName(), descending);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobId, excludeInterim, influencerScore, pageParams, start, end, sort, descending);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetInfluencersRequest other = (GetInfluencersRequest) obj;
-        return Objects.equals(jobId, other.jobId)
-            && Objects.equals(excludeInterim, other.excludeInterim)
-            && Objects.equals(influencerScore, other.influencerScore)
-            && Objects.equals(pageParams, other.pageParams)
-            && Objects.equals(start, other.start)
-            && Objects.equals(end, other.end)
-            && Objects.equals(sort, other.sort)
-            && Objects.equals(descending, other.descending);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java
deleted file mode 100644
index 6d075c7fb535d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.Influencer;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested influencers
- */
-public class GetInfluencersResponse extends AbstractResultResponse<Influencer> {
-
-    public static final ParseField INFLUENCERS = new ParseField("influencers");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetInfluencersResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_influencers_response",
-        true,
-        a -> new GetInfluencersResponse((List<Influencer>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Influencer.PARSER, INFLUENCERS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetInfluencersResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetInfluencersResponse(List<Influencer> influencers, long count) {
-        super(INFLUENCERS, influencers, count);
-    }
-
-    /**
-     * The retrieved influencers
-     * @return the retrieved influencers
-     */
-    public List<Influencer> influencers() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetInfluencersResponse other = (GetInfluencersResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java
deleted file mode 100644
index 037af8a412132..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request object to get {@link Job} objects with the matching {@code jobId}s or
- * {@code groupName}s.
- *
- * {@code _all} explicitly gets all the jobs in the cluster
- * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster
- */
-public class GetJobRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField JOB_IDS = new ParseField("job_ids");
-    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
-    public static final String EXCLUDE_GENERATED = "exclude_generated";
-
-    private static final String ALL_JOBS = "_all";
-    private final List<String> jobIds;
-    private Boolean allowNoMatch;
-    private Boolean excludeGenerated;
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetJobRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "get_job_request",
-        true,
-        a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0])
-    );
-
-    static {
-        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS);
-        PARSER.declareBoolean(GetJobRequest::setAllowNoMatch, ALLOW_NO_MATCH);
-    }
-
-    /**
-     * Helper method to create a query that will get ALL jobs
-     * @return new {@link GetJobRequest} object searching for the jobId "_all"
-     */
-    public static GetJobRequest getAllJobsRequest() {
-        return new GetJobRequest(ALL_JOBS);
-    }
-
-    /**
-     * Get the specified {@link Job} configurations via their unique jobIds
-     * @param jobIds must not contain any null values
-     */
-    public GetJobRequest(String... jobIds) {
-        this(Arrays.asList(jobIds));
-    }
-
-    GetJobRequest(List<String> jobIds) {
-        if (jobIds.stream().anyMatch(Objects::isNull)) {
-            throw new NullPointerException("jobIds must not contain null values");
-        }
-        this.jobIds = new ArrayList<>(jobIds);
-    }
-
-    /**
-     * All the jobIds for which to get configuration information
-     */
-    public List<String> getJobIds() {
-        return jobIds;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no jobs.
-     *
-     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs
-     */
-    public void setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return allowNoMatch;
-    }
-
-    /**
-     * Setting this flag to `true` removes certain fields from the configuration on retrieval.
-     *
-     * This is useful when getting the configuration and wanting to put it in another cluster.
-     *
-     * Default value is false.
-     * @param excludeGenerated Boolean value indicating if certain fields should be removed
-     */
-    public void setExcludeGenerated(boolean excludeGenerated) {
-        this.excludeGenerated = excludeGenerated;
-    }
-
-    public Boolean getExcludeGenerated() {
-        return excludeGenerated;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobIds, excludeGenerated, allowNoMatch);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || other.getClass() != getClass()) {
-            return false;
-        }
-
-        GetJobRequest that = (GetJobRequest) other;
-        return Objects.equals(jobIds, that.jobIds)
-            && Objects.equals(excludeGenerated, that.excludeGenerated)
-            && Objects.equals(allowNoMatch, that.allowNoMatch);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-
-        if (jobIds.isEmpty() == false) {
-            builder.stringListField(JOB_IDS.getPreferredName(), jobIds);
-        }
-
-        if (allowNoMatch != null) {
-            builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch);
-        }
-
-        builder.endObject();
-        return builder;
-    }
-}
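A minimal sketch of the "empty request gets everything" semantics the GetJobRequest javadoc above spells out; as before, the `client` handle and the `machineLearning().getJob(...)` call are assumed from the HLRC's conventions rather than shown in this patch:

    // Sketch only: an empty GetJobRequest implicitly asks for every job,
    // equivalent to GetJobRequest.getAllJobsRequest().
    GetJobRequest request = new GetJobRequest();
    request.setAllowNoMatch(true);
    GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
    List<Job> jobs = response.jobs();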
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java
deleted file mode 100644
index ccc40edf3687d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link Job} objects and the total count found
- */
-public class GetJobResponse extends AbstractResultResponse<Job> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("jobs");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetJobResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "jobs_response",
-        true,
-        a -> new GetJobResponse((List<Job.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetJobResponse(List<Job.Builder> jobBuilders, long count) {
-        super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link Job} objects found in the query
-     */
-    public List<Job> jobs() {
-        return results;
-    }
-
-    public static GetJobResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetJobResponse other = (GetJobResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
deleted file mode 100644
index 009b0239e276d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds - * - * {@code _all} explicitly gets all the jobs' statistics in the cluster - * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster - */ -public class GetJobStatsRequest implements Validatable, ToXContentObject { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_jobs_stats_request", - a -> new GetJobStatsRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - Job.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareBoolean(GetJobStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_JOBS = "_all"; - - private final List jobIds; - private Boolean allowNoMatch; - - /** - * Explicitly gets all jobs statistics - * - * @return a {@link GetJobStatsRequest} for all existing jobs - */ - public static GetJobStatsRequest getAllJobStatsRequest() { - return new GetJobStatsRequest(ALL_JOBS); - } - - GetJobStatsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * Get the specified Job's statistics via their unique jobIds - * - * @param jobIds must be non-null and each jobId must be non-null - */ - public GetJobStatsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * All the jobIds for which to get statistics - */ - public List getJobIds() { - return jobIds; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * This includes {@code _all} string or when no jobs have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. 
Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - GetJobStatsRequest that = (GetJobStatsRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java deleted file mode 100644 index 3443010fe66a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.stats.JobStats; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link JobStats} objects and the total count found - */ -public class GetJobStatsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("jobs"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "jobs_stats_response", - true, - a -> new GetJobStatsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetJobStatsResponse(List jobStats, long count) { - super(RESULTS_FIELD, jobStats, count); - } - - /** - * The collection of {@link JobStats} objects found in the query - */ - public List jobStats() { - return results; - } - - public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetJobStatsResponse other = (GetJobStatsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return 
Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java deleted file mode 100644 index d6ecbf18a2444..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve information about model snapshots for a given job - */ -public class GetModelSnapshotsRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField DESC = new ParseField("desc"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_model_snapshots_request", - a -> new GetModelSnapshotsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(GetModelSnapshotsRequest::setSnapshotId, SNAPSHOT_ID); - PARSER.declareString(GetModelSnapshotsRequest::setSort, SORT); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setStart, START); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setEnd, END); - PARSER.declareBoolean(GetModelSnapshotsRequest::setDesc, DESC); - PARSER.declareObject(GetModelSnapshotsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String jobId; - private String snapshotId; - private String sort; - private String start; - private String end; - private Boolean desc; - private PageParams pageParams; - - /** - * Constructs a request to retrieve snapshot information from a given job - * @param jobId id of the job from which to retrieve results - */ - public GetModelSnapshotsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - /** - * Sets the id of the snapshot to retrieve. - * @param snapshotId the snapshot id - */ - public void setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the snapshot field to sort on. - * @param sort value of "sort". 
- */ - public void setSort(String sort) { - this.sort = sort; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only snapshots whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only snapshots whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Boolean getDesc() { - return desc; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param desc value of "desc" - */ - public void setDesc(boolean desc) { - this.desc = desc; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (desc != null) { - builder.field(DESC.getPreferredName(), desc); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetModelSnapshotsRequest request = (GetModelSnapshotsRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(sort, request.sort) - && Objects.equals(start, request.start) - && Objects.equals(end, request.end) - && Objects.equals(desc, request.desc) - && Objects.equals(pageParams, request.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, pageParams, start, end, sort, desc); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java deleted file mode 100644 index b52055ced3046..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
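The GetModelSnapshotsRequest deleted above was typically filled in like this; every setter appears in the removed code, while the PageParams(from, size) constructor is assumed.

    GetModelSnapshotsRequest request = new GetModelSnapshotsRequest("my-job");
    request.setSort("timestamp");                    // snapshot field to sort on
    request.setDesc(true);                           // newest first
    request.setStart("2021-01-01T00:00:00Z");        // epoch seconds, epoch millis, or ISO string
    request.setPageParams(new PageParams(0, 10));    // PageParams(from, size) assumed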
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.process.ModelSnapshot;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-/**
- * A response containing the requested snapshots
- */
-public class GetModelSnapshotsResponse extends AbstractResultResponse<ModelSnapshot> {
-
-    public static final ParseField SNAPSHOTS = new ParseField("model_snapshots");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetModelSnapshotsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_model_snapshots_response",
-        true,
-        a -> new GetModelSnapshotsResponse((List<ModelSnapshot.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, SNAPSHOTS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetModelSnapshotsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetModelSnapshotsResponse(List<ModelSnapshot.Builder> snapshotBuilders, long count) {
-        super(SNAPSHOTS, snapshotBuilders.stream().map(ModelSnapshot.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The retrieved snapshots
-     * @return the retrieved snapshots
-     */
-    public List<ModelSnapshot> snapshots() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetModelSnapshotsResponse other = (GetModelSnapshotsResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java
deleted file mode 100644
index 628fcc804d423..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
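And the corresponding round trip, with the machineLearning().getModelSnapshots(...) entry point assumed from the client's conventions:

    GetModelSnapshotsResponse response = client.machineLearning().getModelSnapshots(request, RequestOptions.DEFAULT);
    List<ModelSnapshot> snapshots = response.snapshots();   // snapshots() shown above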
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * A request to retrieve overall buckets of set of jobs - */ -public class GetOverallBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField TOP_N = new ParseField("top_n"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - private static final String ALL_JOBS = "_all"; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_overall_buckets_request", - a -> new GetOverallBucketsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareInt(GetOverallBucketsRequest::setTopN, TOP_N); - PARSER.declareString(GetOverallBucketsRequest::setBucketSpan, BUCKET_SPAN); - PARSER.declareBoolean(GetOverallBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareDouble(GetOverallBucketsRequest::setOverallScore, OVERALL_SCORE); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setEnd, END); - PARSER.declareBoolean(GetOverallBucketsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private final List jobIds; - private Integer topN; - private TimeValue bucketSpan; - private Boolean excludeInterim; - private Double overallScore; - private String start; - private String end; - private Boolean allowNoMatch; - - private GetOverallBucketsRequest(String jobId) { - this(Strings.tokenizeToStringArray(jobId, ",")); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - if (jobIds.isEmpty()) { - this.jobIds = Collections.singletonList(ALL_JOBS); - } else { - this.jobIds = Collections.unmodifiableList(jobIds); - } - } - - public List getJobIds() { - return jobIds; - } - - public Integer getTopN() { - return topN; - } - - /** - * Sets the value of "top_n". - * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1. 
- */ - public void setTopN(Integer topN) { - this.topN = topN; - } - - public TimeValue getBucketSpan() { - return bucketSpan; - } - - /** - * Sets the value of "bucket_span". - * @param bucketSpan The span of the overall buckets. Must be greater or equal to the largest job’s bucket_span. - * Defaults to the largest job’s bucket_span. - */ - public void setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - } - - private void setBucketSpan(String bucketSpan) { - this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName()); - } - - public boolean isExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim overall buckets will be filtered out. - * Overall buckets are interim if any of the job buckets within the overall bucket interval are interim. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only overall buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only overall buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Double getOverallScore() { - return overallScore; - } - - /** - * Sets the value of "overall_score". - * Only buckets with "overall_score" equal or greater will be returned. - * @param overallScore value of "anomaly_score". - */ - public void setOverallScore(double overallScore) { - this.overallScore = overallScore; - } - - /** - * See {@link GetJobRequest#getAllowNoMatch()} - * @param allowNoMatch value of "allow_no_match". - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. 
- * - * If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs - */ - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (jobIds.isEmpty() == false) { - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - } - if (topN != null) { - builder.field(TOP_N.getPreferredName(), topN); - } - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (overallScore != null) { - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, topN, bucketSpan, excludeInterim, overallScore, start, end, allowNoMatch); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetOverallBucketsRequest other = (GetOverallBucketsRequest) obj; - return Objects.equals(jobIds, other.jobIds) - && Objects.equals(topN, other.topN) - && Objects.equals(bucketSpan, other.bucketSpan) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(overallScore, other.overallScore) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(allowNoMatch, other.allowNoMatch); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java deleted file mode 100644 index a75b740c99a14..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
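A build sketch for the GetOverallBucketsRequest deleted above; every setter below comes from the removed code.

    GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
    request.setTopN(2);                                  // number of top per-job bucket scores used in overall_score
    request.setBucketSpan(TimeValue.timeValueHours(1));  // must be >= the largest job's bucket_span
    request.setOverallScore(75.0);                       // only buckets with overall_score >= 75
    request.setExcludeInterim(true);                     // drop interim overall buckets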
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.OverallBucket;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested overall buckets
- */
-public class GetOverallBucketsResponse extends AbstractResultResponse<OverallBucket> {
-
-    public static final ParseField OVERALL_BUCKETS = new ParseField("overall_buckets");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetOverallBucketsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_overall_buckets_response",
-        true,
-        a -> new GetOverallBucketsResponse((List<OverallBucket>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), OverallBucket.PARSER, OVERALL_BUCKETS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetOverallBucketsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetOverallBucketsResponse(List<OverallBucket> overallBuckets, long count) {
-        super(OVERALL_BUCKETS, overallBuckets, count);
-    }
-
-    /**
-     * The retrieved overall buckets
-     * @return the retrieved overall buckets
-     */
-    public List<OverallBucket> overallBuckets() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetOverallBucketsResponse other = (GetOverallBucketsResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
deleted file mode 100644
index c3ebcd1f86e99..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
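Round-trip sketch for the response class above (the entry point and the OverallBucket getters are assumed):

    GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
    for (OverallBucket bucket : response.overallBuckets()) {                            // overallBuckets() shown above
        System.out.println(bucket.getTimestamp() + " -> " + bucket.getOverallScore()); // getters assumed
    }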
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve records of a given job - */ -public class GetRecordsRequest implements ToXContentObject, Validatable { - - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_records_request", GetRecordsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareBoolean(GetRecordsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetRecordsRequest::setStart, START); - PARSER.declareStringOrNull(GetRecordsRequest::setEnd, END); - PARSER.declareObject(GetRecordsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetRecordsRequest::setRecordScore, RECORD_SCORE); - PARSER.declareString(GetRecordsRequest::setSort, SORT); - PARSER.declareBoolean(GetRecordsRequest::setDescending, DESCENDING); - } - - private String jobId; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double recordScore; - private String sort; - private Boolean descending; - - private GetRecordsRequest() {} - - /** - * Constructs a request to retrieve records of a given job - * @param jobId id of the job to retrieve records of - */ - public GetRecordsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim records will be filtered out. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only records whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only records whose timestamp is before the "end" value will be returned. 
- * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams The paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getRecordScore() { - return recordScore; - } - - /** - * Sets the value of "record_score". - * Only records with "record_score" equal or greater will be returned. - * @param recordScore value of "record_score". - */ - public void setRecordScore(Double recordScore) { - this.recordScore = recordScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the record field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param descending value of "desc" - */ - public void setDescending(Boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (recordScore != null) { - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, excludeInterim, recordScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetRecordsRequest other = (GetRecordsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(recordScore, other.recordScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java deleted file mode 100644 index 3f94a06211ade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
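The GetRecordsRequest deleted above composes the same way; the setters are all in the removed code, the PageParams constructor is assumed.

    GetRecordsRequest request = new GetRecordsRequest("my-job");
    request.setRecordScore(90.0);                    // only records with record_score >= 90
    request.setSort("record_score");
    request.setDescending(true);
    request.setExcludeInterim(true);
    request.setPageParams(new PageParams(0, 50));    // PageParams(from, size) assumed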
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.AnomalyRecord;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested records
- */
-public class GetRecordsResponse extends AbstractResultResponse<AnomalyRecord> {
-
-    public static final ParseField RECORDS = new ParseField("records");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetRecordsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_records_response",
-        true,
-        a -> new GetRecordsResponse((List<AnomalyRecord>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARSER, RECORDS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetRecordsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetRecordsResponse(List<AnomalyRecord> records, long count) {
-        super(RECORDS, records, count);
-    }
-
-    /**
-     * The retrieved records
-     * @return the retrieved records
-     */
-    public List<AnomalyRecord> records() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetRecordsResponse other = (GetRecordsResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java
deleted file mode 100644
index 50b59c6a92f4f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
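And its round trip (the getRecords entry point is assumed):

    GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
    List<AnomalyRecord> records = response.records();   // records() shown above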
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.client.ml.inference.TrainedModelConfig;
-import org.elasticsearch.core.Nullable;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Set;
-
-public class GetTrainedModelsRequest implements Validatable {
-
-    private static final String DEFINITION = "definition";
-    private static final String TOTAL_FEATURE_IMPORTANCE = "total_feature_importance";
-    private static final String FEATURE_IMPORTANCE_BASELINE = "feature_importance_baseline";
-    public static final String ALLOW_NO_MATCH = "allow_no_match";
-    public static final String EXCLUDE_GENERATED = "exclude_generated";
-    public static final String DECOMPRESS_DEFINITION = "decompress_definition";
-    public static final String TAGS = "tags";
-    public static final String INCLUDE = "include";
-
-    private final List<String> ids;
-    private Boolean allowNoMatch;
-    private Set<String> includes = new HashSet<>();
-    private Boolean decompressDefinition;
-    private Boolean excludeGenerated;
-    private PageParams pageParams;
-    private List<String> tags;
-
-    /**
-     * Helper method to create a request that will get ALL TrainedModelConfigs
-     * @return new {@link GetTrainedModelsRequest} object for the id "_all"
-     */
-    public static GetTrainedModelsRequest getAllTrainedModelConfigsRequest() {
-        return new GetTrainedModelsRequest("_all");
-    }
-
-    public GetTrainedModelsRequest(String... ids) {
-        this.ids = Arrays.asList(ids);
-    }
-
-    public List<String> getIds() {
-        return ids;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return allowNoMatch;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no trained models.
-     *
-     * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all})
-     *                     does not match any trained models
-     */
-    public GetTrainedModelsRequest setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-        return this;
-    }
-
-    public PageParams getPageParams() {
-        return pageParams;
-    }
-
-    public GetTrainedModelsRequest setPageParams(@Nullable PageParams pageParams) {
-        this.pageParams = pageParams;
-        return this;
-    }
-
-    public Set<String> getIncludes() {
-        return Collections.unmodifiableSet(includes);
-    }
-
-    public GetTrainedModelsRequest includeDefinition() {
-        this.includes.add(DEFINITION);
-        return this;
-    }
-
-    public GetTrainedModelsRequest includeTotalFeatureImportance() {
-        this.includes.add(TOTAL_FEATURE_IMPORTANCE);
-        return this;
-    }
-
-    public GetTrainedModelsRequest includeFeatureImportanceBaseline() {
-        this.includes.add(FEATURE_IMPORTANCE_BASELINE);
-        return this;
-    }
-
-    /**
-     * Whether to include the full model definition.
-     *
-     * The full model definition can be very large.
-     * @deprecated Use {@link GetTrainedModelsRequest#includeDefinition()}
-     * @param includeDefinition If {@code true}, the definition is included.
-     */
-    @Deprecated
-    public GetTrainedModelsRequest setIncludeDefinition(Boolean includeDefinition) {
-        if (includeDefinition != null && includeDefinition) {
-            return this.includeDefinition();
-        }
-        return this;
-    }
-
-    public Boolean getDecompressDefinition() {
-        return decompressDefinition;
-    }
-
-    /**
-     * Whether or not to decompress the trained model, or keep it in its compressed string form
-     *
-     * @param decompressDefinition If {@code true}, the definition is decompressed.
-     */
-    public GetTrainedModelsRequest setDecompressDefinition(Boolean decompressDefinition) {
-        this.decompressDefinition = decompressDefinition;
-        return this;
-    }
-
-    public List<String> getTags() {
-        return tags;
-    }
-
-    /**
-     * The tags that the trained model must match. These correspond to {@link TrainedModelConfig#getTags()}.
-     *
-     * The models returned will match ALL tags supplied.
-     * If none are provided, only the provided ids are used to find models
-     * @param tags The tags to match when finding models
-     */
-    public GetTrainedModelsRequest setTags(List<String> tags) {
-        this.tags = tags;
-        return this;
-    }
-
-    /**
-     * See {@link GetTrainedModelsRequest#setTags(List)}
-     */
-    public GetTrainedModelsRequest setTags(String... tags) {
-        return setTags(Arrays.asList(tags));
-    }
-
-    public Boolean getExcludeGenerated() {
-        return excludeGenerated;
-    }
-
-    /**
-     * Setting this flag to `true` removes certain fields from the model definition on retrieval.
-     *
-     * This is useful when getting the model and wanting to put it in another cluster.
-     *
-     * Default value is false.
-     * @param excludeGenerated Boolean value indicating if certain fields should be removed from the model on GET
-     */
-    public GetTrainedModelsRequest setExcludeGenerated(Boolean excludeGenerated) {
-        this.excludeGenerated = excludeGenerated;
-        return this;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (ids == null || ids.isEmpty()) {
-            return Optional.of(ValidationException.withError("trained model id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetTrainedModelsRequest other = (GetTrainedModelsRequest) o;
-        return Objects.equals(ids, other.ids)
-            && Objects.equals(allowNoMatch, other.allowNoMatch)
-            && Objects.equals(decompressDefinition, other.decompressDefinition)
-            && Objects.equals(includes, other.includes)
-            && Objects.equals(excludeGenerated, other.excludeGenerated)
-            && Objects.equals(pageParams, other.pageParams);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(ids, allowNoMatch, pageParams, decompressDefinition, includes, excludeGenerated);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java
deleted file mode 100644
index 9fb7cf8f7fd13..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
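Because its setters return this, the GetTrainedModelsRequest deleted above chains naturally; every call below appears in the removed code.

    GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-model")
        .includeDefinition()            // preferred over the deprecated setIncludeDefinition(true)
        .setDecompressDefinition(true)  // return the definition in decompressed form
        .setTags("prod")                // returned models must match ALL supplied tags
        .setExcludeGenerated(true);     // strip generated fields so the model can be PUT elsewhere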
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.inference.TrainedModelConfig;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class GetTrainedModelsResponse {
-
-    public static final ParseField TRAINED_MODEL_CONFIGS = new ParseField("trained_model_configs");
-    public static final ParseField COUNT = new ParseField("count");
-
-    @SuppressWarnings("unchecked")
-    static final ConstructingObjectParser<GetTrainedModelsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_trained_model_configs",
-        true,
-        args -> new GetTrainedModelsResponse((List<TrainedModelConfig>) args[0], (Long) args[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelConfig.fromXContent(p), TRAINED_MODEL_CONFIGS);
-        PARSER.declareLong(constructorArg(), COUNT);
-    }
-
-    public static GetTrainedModelsResponse fromXContent(final XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final List<TrainedModelConfig> trainedModels;
-    private final Long count;
-
-    public GetTrainedModelsResponse(List<TrainedModelConfig> trainedModels, Long count) {
-        this.trainedModels = trainedModels;
-        this.count = count;
-    }
-
-    public List<TrainedModelConfig> getTrainedModels() {
-        return trainedModels;
-    }
-
-    /**
-     * @return The total count of the trained models that matched the ID pattern.
-     */
-    public Long getCount() {
-        return count;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetTrainedModelsResponse other = (GetTrainedModelsResponse) o;
-        return Objects.equals(this.trainedModels, other.trainedModels) && Objects.equals(this.count, other.count);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(trainedModels, count);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java
deleted file mode 100644
index 0185f531b0c68..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
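Round trip for the response above (the getTrainedModels entry point is assumed):

    GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT);
    List<TrainedModelConfig> models = response.getTrainedModels();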
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class GetTrainedModelsStatsRequest implements Validatable { - - public static final String ALLOW_NO_MATCH = "allow_no_match"; - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - - /** - * Helper method to create a request that will get ALL TrainedModelStats - * @return new {@link GetTrainedModelsStatsRequest} object for the id "_all" - */ - public static GetTrainedModelsStatsRequest getAllTrainedModelStatsRequest() { - return new GetTrainedModelsStatsRequest("_all"); - } - - public GetTrainedModelsStatsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no trained models. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any trained models - */ - public GetTrainedModelsStatsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetTrainedModelsStatsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsStatsRequest other = (GetTrainedModelsStatsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java deleted file mode 100644 index ca218657cce83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
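A sketch for the stats request deleted above, using only methods from the removed code plus the assumed PageParams constructor:

    GetTrainedModelsStatsRequest request = GetTrainedModelsStatsRequest.getAllTrainedModelStatsRequest()
        .setAllowNoMatch(true)                     // don't fail if the wildcard matches nothing
        .setPageParams(new PageParams(0, 100));    // PageParams(from, size) assumed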
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.inference.TrainedModelStats;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class GetTrainedModelsStatsResponse {
-
-    public static final ParseField TRAINED_MODEL_STATS = new ParseField("trained_model_stats");
-    public static final ParseField COUNT = new ParseField("count");
-
-    @SuppressWarnings("unchecked")
-    static final ConstructingObjectParser<GetTrainedModelsStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_trained_model_stats",
-        true,
-        args -> new GetTrainedModelsStatsResponse((List<TrainedModelStats>) args[0], (Long) args[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelStats.fromXContent(p), TRAINED_MODEL_STATS);
-        PARSER.declareLong(constructorArg(), COUNT);
-    }
-
-    public static GetTrainedModelsStatsResponse fromXContent(final XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final List<TrainedModelStats> trainedModelStats;
-    private final Long count;
-
-    public GetTrainedModelsStatsResponse(List<TrainedModelStats> trainedModelStats, Long count) {
-        this.trainedModelStats = trainedModelStats;
-        this.count = count;
-    }
-
-    public List<TrainedModelStats> getTrainedModelStats() {
-        return trainedModelStats;
-    }
-
-    /**
-     * @return The total count of the trained models that matched the ID pattern.
-     */
-    public Long getCount() {
-        return count;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetTrainedModelsStatsResponse other = (GetTrainedModelsStatsResponse) o;
-        return Objects.equals(this.trainedModelStats, other.trainedModelStats) && Objects.equals(this.count, other.count);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(trainedModelStats, count);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java
deleted file mode 100644
index 6c5f1787fd183..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-
-public class MlInfoRequest implements Validatable {}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java
deleted file mode 100644
index 6fa6c6eaaf6be..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-
-public class MlInfoResponse implements Validatable {
-    private final Map<String, Object> info;
-
-    private MlInfoResponse(Map<String, Object> info) {
-        this.info = info;
-    }
-
-    public Map<String, Object> getInfo() {
-        return info;
-    }
-
-    public static MlInfoResponse fromXContent(XContentParser parser) throws IOException {
-        Map<String, Object> info = parser.map();
-        return new MlInfoResponse(info);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(info);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        MlInfoResponse other = (MlInfoResponse) obj;
-        return Objects.equals(info, other.info);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
deleted file mode 100644
index 4a732c9523415..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
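The MlInfo pair deleted above carried no parameters at all; a round-trip sketch (the getMlInfo entry point name is assumed):

    MlInfoResponse response = client.machineLearning().getMlInfo(new MlInfoRequest(), RequestOptions.DEFAULT);
    Map<String, Object> info = response.getInfo();   // defaults and limits of the ML subsystem, as a raw map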
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; - -/** - * A Pojo class containing an Elastic Node's attributes - */ -public class NodeAttributes implements ToXContentObject { - - public static final ParseField ID = new ParseField("id"); - public static final ParseField NAME = new ParseField("name"); - public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id"); - public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address"); - public static final ParseField ATTRIBUTES = new ParseField("attributes"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("node", true, (a) -> { - int i = 0; - String id = (String) a[i++]; - String name = (String) a[i++]; - String ephemeralId = (String) a[i++]; - String transportAddress = (String) a[i++]; - Map attributes = (Map) a[i]; - return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), ATTRIBUTES, ObjectParser.ValueType.OBJECT); - } - - private final String id; - private final String name; - private final String ephemeralId; - private final String transportAddress; - private final Map attributes; - - public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map attributes) { - this.id = id; - this.name = name; - this.ephemeralId = ephemeralId; - this.transportAddress = transportAddress; - this.attributes = Collections.unmodifiableMap(attributes); - } - - /** - * The unique identifier of the node. - */ - public String getId() { - return id; - } - - /** - * The node name. - */ - public String getName() { - return name; - } - - /** - * The ephemeral id of the node. - */ - public String getEphemeralId() { - return ephemeralId; - } - - /** - * The host and port where transport HTTP connections are accepted. - */ - public String getTransportAddress() { - return transportAddress; - } - - /** - * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}. 
- */ - public Map getAttributes() { - return attributes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(NAME.getPreferredName(), name); - builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId); - builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress); - builder.field(ATTRIBUTES.getPreferredName(), attributes); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(id, name, ephemeralId, transportAddress, attributes); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - NodeAttributes that = (NodeAttributes) other; - return Objects.equals(id, that.id) - && Objects.equals(name, that.name) - && Objects.equals(ephemeralId, that.ephemeralId) - && Objects.equals(transportAddress, that.transportAddress) - && Objects.equals(attributes, that.attributes); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java deleted file mode 100644 index c19ff484242ad..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
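NodeAttributes, deleted above, was a plain value class; its public constructor and getters are all in the removed code (the sample attribute key and values below are illustrative only).

    NodeAttributes node = new NodeAttributes(
        "node-1", "ml-node-1", "ephemeral-1", "127.0.0.1:9300",
        Map.of("ml.machine_memory", "17179869184"));   // sample attribute, hypothetical value
    Map<String, String> attrs = node.getAttributes();  // unmodifiable view, per the constructor above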
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to open a Machine Learning Job
- */
-public class OpenJobRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField TIMEOUT = new ParseField("timeout");
-    public static final ConstructingObjectParser<OpenJobRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "open_job_request",
-        true,
-        a -> new OpenJobRequest((String) a[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareString((request, val) -> request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
-    }
-
-    public static OpenJobRequest fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    private String jobId;
-    private TimeValue timeout;
-
-    /**
-     * Create a new request with the desired jobId
-     *
-     * @param jobId unique jobId, must not be null
-     */
-    public OpenJobRequest(String jobId) {
-        this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    /**
-     * The jobId to open
-     *
-     * @param jobId unique jobId, must not be null
-     */
-    public void setJobId(String jobId) {
-        this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
-    }
-
-    public TimeValue getTimeout() {
-        return timeout;
-    }
-
-    /**
-     * How long to wait for the job to open before timing out the request
-     *
-     * @param timeout default value of 30 minutes
-     */
-    public void setTimeout(TimeValue timeout) {
-        this.timeout = timeout;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(Job.ID.getPreferredName(), jobId);
-        if (timeout != null) {
-            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobId, timeout);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        OpenJobRequest that = (OpenJobRequest) other;
-        return Objects.equals(jobId, that.jobId) && Objects.equals(timeout, that.timeout);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java
deleted file mode 100644
index a9c6118db26d6..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Response indicating if the Machine Learning Job is now opened or not
- */
-public class OpenJobResponse implements ToXContentObject {
-
-    private static final ParseField OPENED = new ParseField("opened");
-    private static final ParseField NODE = new ParseField("node");
-
-    public static final ConstructingObjectParser<OpenJobResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "open_job_response",
-        true,
-        (a) -> new OpenJobResponse((Boolean) a[0], (String) a[1])
-    );
-
-    static {
-        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), OPENED);
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE);
-    }
-
-    private final boolean opened;
-    private final String node;
-
-    OpenJobResponse(boolean opened, String node) {
-        this.opened = opened;
-        this.node = node;
-    }
-
-    public static OpenJobResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    /**
-     * Has the job opened or not
-     *
-     * @return boolean value indicating the job opened status
-     */
-    public boolean isOpened() {
-        return opened;
-    }
-
-    /**
-     * The node that the job was assigned to
-     *
-     * @return The ID of a node if the job was assigned to a node. If an empty string is returned
-     *         it means the job was allowed to open lazily and has not yet been assigned to a node.
-     *         If null is returned it means the server version is too old to return node
-     *         information.
-     */
-    public String getNode() {
-        return node;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        OpenJobResponse that = (OpenJobResponse) other;
-        return opened == that.opened && Objects.equals(node, that.node);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(opened, node);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(OPENED.getPreferredName(), opened);
-        if (node != null) {
-            builder.field(NODE.getPreferredName(), node);
-        }
-        builder.endObject();
-        return builder;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java
deleted file mode 100644
index 0752221e9eee0..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
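The two classes above form the open-job round trip. A hedged sketch of typical usage, assuming a RestHighLevelClient instance named client (the client plumbing is not part of this excerpt):

    OpenJobRequest request = new OpenJobRequest("my-anomaly-job"); // job_id must be non-null
    request.setTimeout(TimeValue.timeValueMinutes(10));            // server-side default is 30 minutes
    OpenJobResponse response = client.machineLearning().openJob(request, RequestOptions.DEFAULT);
    boolean opened = response.isOpened();
    String node = response.getNode(); // "" when opened lazily, null from very old servers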
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.calendars.Calendar;
-import org.elasticsearch.client.ml.calendars.ScheduledEvent;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request to add a ScheduledEvent to a Machine Learning calendar
- */
-public class PostCalendarEventRequest implements Validatable, ToXContentObject {
-
-    private final String calendarId;
-    private final List<ScheduledEvent> scheduledEvents;
-
-    public static final String INCLUDE_CALENDAR_ID_KEY = "include_calendar_id";
-    public static final ParseField EVENTS = new ParseField("events");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<PostCalendarEventRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "post_calendar_event_request",
-        a -> new PostCalendarEventRequest((String) a[0], (List<ScheduledEvent>) a[1])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID);
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS);
-    }
-
-    public static final MapParams EXCLUDE_CALENDAR_ID_PARAMS = new MapParams(
-        Collections.singletonMap(INCLUDE_CALENDAR_ID_KEY, Boolean.toString(false))
-    );
-
-    /**
-     * Create a new PostCalendarEventRequest with an existing non-null calendarId and a list of Scheduled events
-     *
-     * @param calendarId The ID of the calendar, must be non-null
-     * @param scheduledEvents The non-null, non-empty, list of {@link ScheduledEvent} objects to add to the calendar
-     */
-    public PostCalendarEventRequest(String calendarId, List<ScheduledEvent> scheduledEvents) {
-        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
-        this.scheduledEvents = Objects.requireNonNull(scheduledEvents, "[events] must not be null.");
-        if (scheduledEvents.isEmpty()) {
-            throw new IllegalArgumentException("At least 1 event is required");
-        }
-    }
-
-    public String getCalendarId() {
-        return calendarId;
-    }
-
-    public List<ScheduledEvent> getScheduledEvents() {
-        return scheduledEvents;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (params.paramAsBoolean(INCLUDE_CALENDAR_ID_KEY, true)) {
-            builder.field(Calendar.ID.getPreferredName(), calendarId);
-        }
-        builder.field(EVENTS.getPreferredName(), scheduledEvents);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(calendarId, scheduledEvents);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        PostCalendarEventRequest other = (PostCalendarEventRequest) obj;
-        return Objects.equals(calendarId, other.calendarId) && Objects.equals(scheduledEvents, other.scheduledEvents);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java
deleted file mode 100644
index 4aeb8da98f260..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.calendars.ScheduledEvent;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Response to adding ScheduledEvent(s) to a Machine Learning calendar
- */
-public class PostCalendarEventResponse implements ToXContentObject {
-
-    private final List<ScheduledEvent> scheduledEvents;
-    public static final ParseField EVENTS = new ParseField("events");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<PostCalendarEventResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "post_calendar_event_response",
-        true,
-        a -> new PostCalendarEventResponse((List<ScheduledEvent>) a[0])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS);
-    }
-
-    public static PostCalendarEventResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    /**
-     * Create a new PostCalendarEventResponse containing the scheduled Events
-     *
-     * @param scheduledEvents The list of {@link ScheduledEvent} objects
-     */
-    public PostCalendarEventResponse(List<ScheduledEvent> scheduledEvents) {
-        this.scheduledEvents = scheduledEvents;
-    }
-
-    public List<ScheduledEvent> getScheduledEvents() {
-        return scheduledEvents;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(EVENTS.getPreferredName(), scheduledEvents);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(scheduledEvents);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        PostCalendarEventResponse other = (PostCalendarEventResponse) obj;
-        return Objects.equals(scheduledEvents, other.scheduledEvents);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java
deleted file mode 100644
index 5918f15c412c4..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
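PostCalendarEventRequest validates its arguments eagerly in the constructor; the failure modes below follow directly from the checks shown above (the calendar id is invented, and events stands for a previously built non-empty List<ScheduledEvent>):

    new PostCalendarEventRequest(null, events);                               // NullPointerException: "[calendar_id] must not be null."
    new PostCalendarEventRequest("planned-outages", Collections.emptyList()); // IllegalArgumentException: "At least 1 event is required"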
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentType;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * Request to post data to a Machine Learning job
- */
-public class PostDataRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField RESET_START = new ParseField("reset_start");
-    public static final ParseField RESET_END = new ParseField("reset_end");
-    public static final ParseField CONTENT_TYPE = new ParseField("content_type");
-
-    public static final ConstructingObjectParser<PostDataRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "post_data_request",
-        (a) -> new PostDataRequest((String) a[0], XContentType.fromMediaType((String) a[1]), new byte[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), CONTENT_TYPE);
-        PARSER.declareStringOrNull(PostDataRequest::setResetEnd, RESET_END);
-        PARSER.declareStringOrNull(PostDataRequest::setResetStart, RESET_START);
-    }
-
-    private final String jobId;
-    private final XContentType xContentType;
-    private final BytesReference content;
-    private String resetStart;
-    private String resetEnd;
-
-    /**
-     * Create a new PostDataRequest object
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
-     * @param content bulk serialized content in the format of the passed {@link XContentType}
-     */
-    public PostDataRequest(String jobId, XContentType xContentType, BytesReference content) {
-        this.jobId = Objects.requireNonNull(jobId, "job_id must not be null");
-        this.xContentType = Objects.requireNonNull(xContentType, "content_type must not be null");
-        this.content = Objects.requireNonNull(content, "content must not be null");
-    }
-
-    /**
-     * Create a new PostDataRequest object referencing the passed {@code byte[]} content
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
-     * @param content bulk serialized content in the format of the passed {@link XContentType}
-     */
-    public PostDataRequest(String jobId, XContentType xContentType, byte[] content) {
-        this(jobId, xContentType, new BytesArray(content));
-    }
-
-    /**
-     * Create a new PostDataRequest object referencing the passed {@link JsonBuilder} object
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param builder {@link JsonBuilder} object containing documents to be serialized and sent in {@link XContentType#JSON} format
-     */
-    public PostDataRequest(String jobId, JsonBuilder builder) {
-        this(jobId, XContentType.JSON, builder.build());
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    public String getResetStart() {
-        return resetStart;
-    }
-
-    /**
-     * Specifies the start of the bucket resetting range
-     *
-     * @param resetStart String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string
-     */
-    public void setResetStart(String resetStart) {
-        this.resetStart = resetStart;
-    }
-
-    public String getResetEnd() {
-        return resetEnd;
-    }
-
-    /**
-     * Specifies the end of the bucket resetting range
-     *
-     * @param resetEnd String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string
-     */
-    public void setResetEnd(String resetEnd) {
-        this.resetEnd = resetEnd;
-    }
-
-    public BytesReference getContent() {
-        return content;
-    }
-
-    public XContentType getXContentType() {
-        return xContentType;
-    }
-
-    @Override
-    public int hashCode() {
-        // We leave out the content for server side parity
-        return Objects.hash(jobId, resetStart, resetEnd, xContentType);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == this) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        // We leave out the content for server side parity
-        PostDataRequest other = (PostDataRequest) obj;
-        return Objects.equals(jobId, other.jobId)
-            && Objects.equals(resetStart, other.resetStart)
-            && Objects.equals(resetEnd, other.resetEnd)
-            && Objects.equals(xContentType, other.xContentType);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(Job.ID.getPreferredName(), jobId);
-        builder.field(CONTENT_TYPE.getPreferredName(), xContentType.mediaType());
-        if (resetEnd != null) {
-            builder.field(RESET_END.getPreferredName(), resetEnd);
-        }
-        if (resetStart != null) {
-            builder.field(RESET_START.getPreferredName(), resetStart);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    /**
-     * Class for incrementally building a bulk document request in {@link XContentType#JSON} format
-     */
-    public static class JsonBuilder {
-
-        private final List<ByteBuffer> bytes = new ArrayList<>();
-
-        /**
-         * Add a document via a {@code byte[]} array
-         *
-         * @param doc {@code byte[]} array of a serialized JSON object
-         */
-        public JsonBuilder addDoc(byte[] doc) {
-            bytes.add(ByteBuffer.wrap(doc));
-            return this;
-        }
-
-        /**
-         * Add a document via a serialized JSON String
-         *
-         * @param doc a serialized JSON String
-         */
-        public JsonBuilder addDoc(String doc) {
-            bytes.add(ByteBuffer.wrap(doc.getBytes(StandardCharsets.UTF_8)));
-            return this;
-        }
-
-        /**
-         * Add a document via an object map
-         *
-         * @param doc document object to add to bulk request
-         * @throws IOException on parsing/serialization errors
-         */
-        public JsonBuilder addDoc(Map<String, Object> doc) throws
IOException { - try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { - builder.map(doc); - bytes.add(ByteBuffer.wrap(BytesReference.toBytes(BytesReference.bytes(builder)))); - } - return this; - } - - private BytesReference build() { - ByteBuffer[] buffers = bytes.toArray(new ByteBuffer[bytes.size()]); - return BytesReference.fromByteBuffers(buffers); - } - - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java deleted file mode 100644 index 4d8c8886fd896..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response object when posting data to a Machine Learning Job - */ -public class PostDataResponse implements ToXContentObject { - - private DataCounts dataCounts; - - public static PostDataResponse fromXContent(XContentParser parser) throws IOException { - return new PostDataResponse(DataCounts.PARSER.parse(parser, null)); - } - - public PostDataResponse(DataCounts counts) { - this.dataCounts = counts; - } - - public DataCounts getDataCounts() { - return dataCounts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return dataCounts.toXContent(builder, params); - } - - @Override - public int hashCode() { - return Objects.hashCode(dataCounts); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - PostDataResponse other = (PostDataResponse) obj; - return Objects.equals(dataCounts, other.dataCounts); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java deleted file mode 100644 index 8bd277fa31efc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
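PostDataRequest.JsonBuilder, defined above, buffers each document as a ByteBuffer and concatenates them into a single bulk body via BytesReference.fromByteBuffers. A usage sketch with an invented job id (addDoc(Map) declares IOException, so this belongs in a method that throws or catches it):

    PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
    builder.addDoc("{\"time\": 1546300800000, \"total\": 42}");              // pre-serialized JSON string
    builder.addDoc(Collections.<String, Object>singletonMap("total", 7));    // map, serialized through XContentBuilder
    PostDataRequest request = new PostDataRequest("my-job", builder);        // always sent as XContentType.JSON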
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to preview a Machine Learning Datafeed
- */
-public class PreviewDatafeedRequest implements Validatable, ToXContentObject {
-
-    private static final ParseField DATAFEED_CONFIG = new ParseField("datafeed_config");
-    private static final ParseField JOB_CONFIG = new ParseField("job_config");
-
-    public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "preview_datafeed_request",
-        a -> new PreviewDatafeedRequest((String) a[0], (DatafeedConfig.Builder) a[1], (Job.Builder) a[2])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.ID);
-        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.PARSER, DATAFEED_CONFIG);
-        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Job.PARSER, JOB_CONFIG);
-    }
-
-    public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    private final String datafeedId;
-    private final DatafeedConfig datafeedConfig;
-    private final Job jobConfig;
-
-    private PreviewDatafeedRequest(
-        @Nullable String datafeedId,
-        @Nullable DatafeedConfig.Builder datafeedConfig,
-        @Nullable Job.Builder jobConfig
-    ) {
-        this.datafeedId = datafeedId;
-        this.datafeedConfig = datafeedConfig == null ? null : datafeedConfig.build();
-        this.jobConfig = jobConfig == null ?
null : jobConfig.build(); - } - - /** - * Create a new request with the desired datafeedId - * - * @param datafeedId unique datafeedId, must not be null - */ - public PreviewDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - this.datafeedConfig = null; - this.jobConfig = null; - } - - /** - * Create a new request to preview the provided datafeed config and optional job config - * @param datafeedConfig The datafeed to preview - * @param jobConfig The associated job config (required if the datafeed does not refer to an existing job) - */ - public PreviewDatafeedRequest(DatafeedConfig datafeedConfig, Job jobConfig) { - this.datafeedId = null; - this.datafeedConfig = datafeedConfig; - this.jobConfig = jobConfig; - } - - public String getDatafeedId() { - return datafeedId; - } - - public DatafeedConfig getDatafeedConfig() { - return datafeedConfig; - } - - public Job getJobConfig() { - return jobConfig; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (datafeedId != null) { - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - } - if (datafeedConfig != null) { - builder.field(DATAFEED_CONFIG.getPreferredName(), datafeedConfig); - } - if (jobConfig != null) { - builder.field(JOB_CONFIG.getPreferredName(), jobConfig); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, datafeedConfig, jobConfig); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - PreviewDatafeedRequest that = (PreviewDatafeedRequest) other; - return Objects.equals(datafeedId, that.datafeedId) - && Objects.equals(datafeedConfig, that.datafeedConfig) - && Objects.equals(jobConfig, that.jobConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java deleted file mode 100644 index 44ed4e40cd165..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
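PreviewDatafeedRequest supports two mutually exclusive construction modes, mirroring the two public constructors above (datafeedConfig and jobConfig are assumed to be previously built objects):

    PreviewDatafeedRequest byId = new PreviewDatafeedRequest("datafeed-1");               // preview a stored datafeed
    PreviewDatafeedRequest adHoc = new PreviewDatafeedRequest(datafeedConfig, jobConfig); // preview an unpersisted config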
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.xcontent.DeprecationHandler;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentFactory;
-import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.XContentType;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-/**
- * Response containing a datafeed preview in JSON format
- */
-public class PreviewDatafeedResponse implements ToXContentObject {
-
-    private BytesReference preview;
-
-    public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
-        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
-            parser.nextToken();
-            builder.copyCurrentStructure(parser);
-            return new PreviewDatafeedResponse(BytesReference.bytes(builder));
-        }
-    }
-
-    public PreviewDatafeedResponse(BytesReference preview) {
-        this.preview = preview;
-    }
-
-    public BytesReference getPreview() {
-        return preview;
-    }
-
-    /**
-     * Parses the preview to a list of {@link Map} objects
-     * @return List of previewed data
-     * @throws IOException If there is a parsing issue with the {@link BytesReference}
-     * @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
-     */
-    @SuppressWarnings("unchecked")
-    public List<Map<String, Object>> getDataList() throws IOException {
-        try (
-            StreamInput streamInput = preview.streamInput();
-            XContentParser parser = XContentType.JSON.xContent()
-                .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)
-        ) {
-            XContentParser.Token token = parser.nextToken();
-            if (token == XContentParser.Token.START_ARRAY) {
-                return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>) obj).collect(Collectors.toList());
-            } else {
-                return Collections.singletonList(parser.mapOrdered());
-            }
-        }
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        try (InputStream stream = preview.streamInput()) {
-            builder.rawValue(stream, XContentType.JSON);
-        }
-        return builder;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(preview);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
-        return Objects.equals(preview, other.preview);
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java
deleted file mode 100644
index cba01a764f6ca..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-
-import java.security.InvalidParameterException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request class for adding Machine Learning Jobs to an existing calendar
- */
-public class PutCalendarJobRequest implements Validatable {
-
-    private final List<String> jobIds;
-    private final String calendarId;
-
-    /**
-     * Create a new request referencing an existing Calendar and which JobIds to add
-     * to it.
-     *
-     * @param calendarId The non-null ID of the calendar
-     * @param jobIds JobIds to add to the calendar, cannot be empty, or contain null values.
-     *               It can be a list of jobs or groups.
-     */
-    public PutCalendarJobRequest(String calendarId, String... jobIds) {
-        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
-        if (jobIds.length == 0) {
-            throw new InvalidParameterException("jobIds must not be empty.");
-        }
-        if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) {
-            throw new NullPointerException("jobIds must not contain null values.");
-        }
-        this.jobIds = Arrays.asList(jobIds);
-    }
-
-    public List<String> getJobIds() {
-        return jobIds;
-    }
-
-    public String getCalendarId() {
-        return calendarId;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobIds, calendarId);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        PutCalendarJobRequest that = (PutCalendarJobRequest) other;
-        return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java
deleted file mode 100644
index 7a45bc3163732..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
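PutCalendarJobRequest takes the job ids as varargs and, per its javadoc, accepts job ids and group ids interchangeably; an illustrative construction with invented ids:

    PutCalendarJobRequest request = new PutCalendarJobRequest("planned-outages", "job-1", "nightly-jobs");
    // An empty jobIds array fails fast with InvalidParameterException; null entries with NullPointerException.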
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning calendar - */ -public class PutCalendarRequest implements Validatable, ToXContentObject { - - private final Calendar calendar; - - public PutCalendarRequest(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PutCalendarRequest other = (PutCalendarRequest) obj; - return Objects.equals(calendar, other.calendar); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java deleted file mode 100644 index 3e3170a954815..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutCalendarResponse implements ToXContentObject { - - public static PutCalendarResponse fromXContent(XContentParser parser) throws IOException { - return new PutCalendarResponse(Calendar.PARSER.parse(parser, null)); - } - - private final Calendar calendar; - - PutCalendarResponse(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - PutCalendarResponse other = (PutCalendarResponse) obj; - return Objects.equals(calendar, other.calendar); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java deleted file mode 100644 index 33015ed97bf97..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-import java.util.Optional;
-
-public class PutDataFrameAnalyticsRequest implements ToXContentObject, Validatable {
-
-    private final DataFrameAnalyticsConfig config;
-
-    public PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) {
-        this.config = config;
-    }
-
-    public DataFrameAnalyticsConfig getConfig() {
-        return config;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (config == null) {
-            return Optional.of(ValidationException.withError("put requires a non-null data frame analytics config"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return config.toXContent(builder, params);
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        PutDataFrameAnalyticsRequest other = (PutDataFrameAnalyticsRequest) o;
-        return Objects.equals(config, other.config);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(config);
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java
deleted file mode 100644
index 7387de559c256..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
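PutDataFrameAnalyticsRequest is the one request in this group that overrides Validatable.validate(); the Optional-based contract, grounded in the code above:

    PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(null);
    Optional<ValidationException> error = request.validate();
    // error.isPresent() == true: "put requires a non-null data frame analytics config"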
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutDataFrameAnalyticsResponse { - - public static PutDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return new PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig.fromXContent(parser)); - } - - private final DataFrameAnalyticsConfig config; - - public PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig config) { - this.config = config; - } - - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PutDataFrameAnalyticsResponse other = (PutDataFrameAnalyticsResponse) o; - return Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java deleted file mode 100644 index d079f1b0fc8d6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning Datafeed given a {@link DatafeedConfig} configuration - */ -public class PutDatafeedRequest implements Validatable, ToXContentObject { - - private final DatafeedConfig datafeed; - - /** - * Construct a new PutDatafeedRequest - * - * @param datafeed a {@link DatafeedConfig} configuration to create - */ - public PutDatafeedRequest(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - public DatafeedConfig getDatafeed() { - return datafeed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return datafeed.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutDatafeedRequest request = (PutDatafeedRequest) object; - return Objects.equals(datafeed, request.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java deleted file mode 100644 index 6abaf8deb4be3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link DatafeedConfig} - */ -public class PutDatafeedResponse implements ToXContentObject { - - private DatafeedConfig datafeed; - - public static PutDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return new PutDatafeedResponse(DatafeedConfig.PARSER.parse(parser, null).build()); - } - - PutDatafeedResponse(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - public DatafeedConfig getResponse() { - return datafeed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - datafeed.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutDatafeedResponse response = (PutDatafeedResponse) object; - return Objects.equals(datafeed, response.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java deleted file mode 100644 index dd08f7a96c9b0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.MlFilter;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to create a new Machine Learning MlFilter given a {@link MlFilter} configuration
- */
-public class PutFilterRequest implements Validatable, ToXContentObject {
-
-    private final MlFilter filter;
-
-    /**
-     * Construct a new PutFilterRequest
-     *
-     * @param filter a {@link MlFilter} configuration to create
-     */
-    public PutFilterRequest(MlFilter filter) {
-        this.filter = filter;
-    }
-
-    public MlFilter getMlFilter() {
-        return filter;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return filter.toXContent(builder, params);
-    }
-
-    @Override
-    public boolean equals(Object object) {
-        if (this == object) {
-            return true;
-        }
-
-        if (object == null || getClass() != object.getClass()) {
-            return false;
-        }
-
-        PutFilterRequest request = (PutFilterRequest) object;
-        return Objects.equals(filter, request.filter);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(filter);
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java
deleted file mode 100644
index 48a850be7d228..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link MlFilter} - */ -public class PutFilterResponse implements ToXContentObject { - - private MlFilter filter; - - public static PutFilterResponse fromXContent(XContentParser parser) throws IOException { - return new PutFilterResponse(MlFilter.PARSER.parse(parser, null).build()); - } - - PutFilterResponse(MlFilter filter) { - this.filter = filter; - } - - public MlFilter getResponse() { - return filter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - filter.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutFilterResponse response = (PutFilterResponse) object; - return Objects.equals(filter, response.filter); - } - - @Override - public int hashCode() { - return Objects.hash(filter); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java deleted file mode 100644 index 04bfc4f3f9169..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning Job given a {@link Job} configuration - */ -public class PutJobRequest implements Validatable, ToXContentObject { - - private final Job job; - - /** - * Construct a new PutJobRequest - * - * @param job a {@link Job} configuration to create - */ - public PutJobRequest(Job job) { - this.job = job; - } - - public Job getJob() { - return job; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return job.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutJobRequest request = (PutJobRequest) object; - return Objects.equals(job, request.job); - } - - @Override - public int hashCode() { - return Objects.hash(job); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java deleted file mode 100644 index 532a6f54ba30a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link Job} - */ -public class PutJobResponse implements ToXContentObject { - - private Job job; - - public static PutJobResponse fromXContent(XContentParser parser) throws IOException { - return new PutJobResponse(Job.PARSER.parse(parser, null).build()); - } - - PutJobResponse(Job job) { - this.job = job; - } - - public Job getResponse() { - return job; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - job.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutJobResponse response = (PutJobResponse) object; - return Objects.equals(job, response.job); - } - - @Override - public int hashCode() { - return Objects.hash(job); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java deleted file mode 100644 index 7988ae35f1c6a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class PutTrainedModelAliasRequest implements Validatable { - - public static final String REASSIGN = "reassign"; - - private final String modelAlias; - private final String modelId; - private final Boolean reassign; - - public PutTrainedModelAliasRequest(String modelAlias, String modelId, Boolean reassign) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - this.reassign = reassign; - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - public Boolean getReassign() { - return reassign; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelAliasRequest request = (PutTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) - && Objects.equals(modelId, request.modelId) - && Objects.equals(reassign, request.reassign); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId, reassign); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java deleted file mode 100644 index 5276713c921be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
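A sketch of PutTrainedModelAliasRequest; the ids are invented, and the reassign flag follows the server-side semantics, where it must be true to move an alias that is already assigned to another model:

    PutTrainedModelAliasRequest request = new PutTrainedModelAliasRequest("my-model", "my-model-v2", true);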
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelRequest implements Validatable, ToXContentObject { - - private final TrainedModelConfig config; - - public PutTrainedModelRequest(TrainedModelConfig config) { - this.config = config; - } - - public TrainedModelConfig getTrainedModelConfig() { - return config; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return config.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelRequest request = (PutTrainedModelRequest) o; - return Objects.equals(config, request.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } - - @Override - public final String toString() { - return Strings.toString(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java deleted file mode 100644 index dabcc7d24cc0f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelResponse implements ToXContentObject { - - private final TrainedModelConfig trainedModelConfig; - - public static PutTrainedModelResponse fromXContent(XContentParser parser) throws IOException { - return new PutTrainedModelResponse(TrainedModelConfig.PARSER.parse(parser, null).build()); - } - - public PutTrainedModelResponse(TrainedModelConfig trainedModelConfig) { - this.trainedModelConfig = trainedModelConfig; - } - - public TrainedModelConfig getResponse() { - return trainedModelConfig; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return trainedModelConfig.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelResponse response = (PutTrainedModelResponse) o; - return Objects.equals(trainedModelConfig, response.trainedModelConfig); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModelConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java deleted file mode 100644 index 0295d72b7d9c5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to revert to a specific model snapshot for a given job - */ -public class RevertModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshots_request", - a -> new RevertModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareBoolean(RevertModelSnapshotRequest::setDeleteInterveningResults, DELETE_INTERVENING); - } - - private final String jobId; - private final String snapshotId; - private Boolean deleteInterveningResults; - - /** - * Constructs a request to revert to a given model snapshot - * @param jobId id of the job for which to revert the model snapshot - * @param snapshotId id of the snapshot to which to revert - */ - public RevertModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public Boolean getDeleteInterveningResults() { - return deleteInterveningResults; - } - - /** - * Sets the request flag that indicates whether or not intervening results should be deleted. - * @param deleteInterveningResults Flag that indicates whether or not intervening results should be deleted. 
- */ - public void setDeleteInterveningResults(Boolean deleteInterveningResults) { - this.deleteInterveningResults = deleteInterveningResults; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (deleteInterveningResults != null) { - builder.field(DELETE_INTERVENING.getPreferredName(), deleteInterveningResults); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotRequest request = (RevertModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(deleteInterveningResults, request.deleteInterveningResults); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, deleteInterveningResults); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java deleted file mode 100644 index 6110569ac9197..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response containing the reverted model snapshot - */ -public class RevertModelSnapshotResponse implements ToXContentObject { - - private static final ParseField MODEL = new ParseField("model"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshot_response", - true, - a -> new RevertModelSnapshotResponse((ModelSnapshot.Builder) a[0]) - ); - - static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static RevertModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public RevertModelSnapshotResponse(ModelSnapshot.Builder modelSnapshot) { - this.model = modelSnapshot.build(); - } - - private final ModelSnapshot model; - - /** - * Get full information about the reverted model snapshot - * @return the reverted model snapshot. 
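A usage sketch for the RevertModelSnapshotRequest removed above, before its response class continues below; both ids are required, and "my-job" / "snap-1" are placeholder values:

    import org.elasticsearch.client.ml.RevertModelSnapshotRequest;

    class RevertModelSnapshotExample {
        static RevertModelSnapshotRequest example() {
            RevertModelSnapshotRequest request = new RevertModelSnapshotRequest("my-job", "snap-1");
            // Optionally delete results recorded after the snapshot being reverted to.
            request.setDeleteInterveningResults(true);
            return request;
        }
    }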
- */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(model); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotResponse other = (RevertModelSnapshotResponse) obj; - return Objects.equals(model, other.model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java deleted file mode 100644 index 7e03117fd13d4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; - -/** - * Sets ML into upgrade_mode - */ -public class SetUpgradeModeRequest implements Validatable { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - private boolean enabled; - private TimeValue timeout; - - /** - * Create a new request - * - * @param enabled whether to enable `upgrade_mode` or not - */ - public SetUpgradeModeRequest(boolean enabled) { - this.enabled = enabled; - } - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the request to be completed - * - * @param timeout default value of 30 seconds - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, timeout); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - SetUpgradeModeRequest that = (SetUpgradeModeRequest) other; - return Objects.equals(enabled, that.enabled) && Objects.equals(timeout, that.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java deleted file mode 100644 index 0bb09846e7047..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
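The SetUpgradeModeRequest deleted above was a two-field request; a minimal sketch (the explicit timeout simply restates the documented 30-second default):

    import org.elasticsearch.client.ml.SetUpgradeModeRequest;
    import org.elasticsearch.core.TimeValue;

    class SetUpgradeModeExample {
        static SetUpgradeModeRequest example() {
            SetUpgradeModeRequest request = new SetUpgradeModeRequest(true); // enable upgrade_mode
            request.setTimeout(TimeValue.timeValueSeconds(30));
            return request;
        }
    }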
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -public class StartDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private TimeValue timeout; - - public StartDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public TimeValue getTimeout() { - return timeout; - } - - public StartDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) { - this.timeout = timeout; - return this; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StartDataFrameAnalyticsRequest other = (StartDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(timeout, other.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(id, timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java deleted file mode 100644 index a158ad9eae705..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
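For the StartDataFrameAnalyticsRequest removed above, note that a null id is rejected by validate() rather than by the constructor, and that setTimeout returns this; a short sketch with a placeholder analytics id:

    import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
    import org.elasticsearch.core.TimeValue;

    class StartAnalyticsExample {
        static StartDataFrameAnalyticsRequest example() {
            return new StartDataFrameAnalyticsRequest("my-analytics")
                .setTimeout(TimeValue.timeValueMinutes(1));
        }
    }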
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Data Frame Analytics job is now started or not - */ -public class StartDataFrameAnalyticsResponse extends AcknowledgedResponse { - - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser<StartDataFrameAnalyticsResponse, Void> PARSER = new ConstructingObjectParser<>( - "start_data_frame_analytics_response", - true, - (a) -> new StartDataFrameAnalyticsResponse((Boolean) a[0], (String) a[1]) - ); - - static { - declareAcknowledgedField(PARSER); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final String node; - - public StartDataFrameAnalyticsResponse(boolean acknowledged, String node) { - super(acknowledged); - this.node = node; - } - - public static StartDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. If an empty string is returned - * it means the job was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StartDataFrameAnalyticsResponse that = (StartDataFrameAnalyticsResponse) other; - return isAcknowledged() == that.isAcknowledged() && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(isAcknowledged(), node); - } - - @Override - public void addCustomFields(XContentBuilder builder, Params params) throws IOException { - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java deleted file mode 100644 index 48a9d41192215..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to start a Datafeed - */ -public class StartDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_request", - a -> new StartDatafeedRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(StartDatafeedRequest::setStart, START); - PARSER.declareString(StartDatafeedRequest::setEnd, END); - PARSER.declareString((params, val) -> params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - private final String datafeedId; - private String start; - private String end; - private TimeValue timeout; - - /** - * Create a new StartDatafeedRequest for the given DatafeedId - * - * @param datafeedId non-null existing Datafeed ID - */ - public StartDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - } - - public String getDatafeedId() { - return datafeedId; - } - - public String getStart() { - return start; - } - - /** - * The time that the datafeed should begin. This value is inclusive. - * - * If you specify a start value that is earlier than the timestamp of the latest processed record, - * the datafeed continues from 1 millisecond after the timestamp of the latest processed record. - * - * If you do not specify a start time and the datafeed is associated with a new job, - * the analysis starts from the earliest time for which data is available. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * The time that the datafeed should end. This value is exclusive. - * If you do not specify an end time, the datafeed runs continuously. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setEnd(String end) { - this.end = end; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * Indicates how long to wait for the cluster to respond to the request. 
- * - * @param timeout TimeValue for how long to wait for a response from the cluster - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, start, end, timeout); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - StartDatafeedRequest other = (StartDatafeedRequest) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(timeout, other.timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java deleted file mode 100644 index 25417797bb6ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
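A construction sketch for the StartDatafeedRequest removed above; the datafeed id and ISO 8601 timestamps are placeholders, and the start/end semantics follow the Javadoc (start inclusive, end exclusive, no end meaning the datafeed runs continuously):

    import org.elasticsearch.client.ml.StartDatafeedRequest;
    import org.elasticsearch.core.TimeValue;

    class StartDatafeedExample {
        static StartDatafeedRequest example() {
            StartDatafeedRequest request = new StartDatafeedRequest("my-datafeed");
            request.setStart("2022-01-01T00:00:00Z"); // inclusive; epoch seconds/millis also accepted
            request.setEnd("2022-02-01T00:00:00Z");   // exclusive; omit to run continuously
            request.setTimeout(TimeValue.timeValueSeconds(20));
            return request;
        }
    }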
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now started or not - */ -public class StartDatafeedResponse implements ToXContentObject { - - private static final ParseField STARTED = new ParseField("started"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_response", - true, - (a) -> new StartDatafeedResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STARTED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean started; - private final String node; - - public StartDatafeedResponse(boolean started, String node) { - this.started = started; - this.node = node; - } - - public static StartDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed started or not - * - * @return boolean value indicating the Datafeed started status - */ - public boolean isStarted() { - return started; - } - - /** - * The node that the datafeed was assigned to - * - * @return The ID of a node if the datafeed was assigned to a node. If an empty string is returned - * it means the datafeed was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StartDatafeedResponse that = (StartDatafeedResponse) other; - return started == that.started && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(started, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STARTED.getPreferredName(), started); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java deleted file mode 100644 index c8263bed50fac..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
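The getNode() contract of the StartDatafeedResponse removed above is tri-state and easy to misread; a small helper sketch spelling out the three cases from its Javadoc:

    import org.elasticsearch.client.ml.StartDatafeedResponse;

    class DatafeedAssignment {
        static String describe(StartDatafeedResponse response) {
            String node = response.getNode();
            if (node == null) {
                return "unknown (server too old to report the node)";
            }
            if (node.isEmpty()) {
                return "started lazily, not yet assigned to a node";
            }
            return "assigned to node " + node;
        }
    }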
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; -import java.util.Optional; - -public class StopDataFrameAnalyticsRequest implements Validatable { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final ParseField FORCE = new ParseField("force"); - - private final String id; - private Boolean allowNoMatch; - private Boolean force; - private TimeValue timeout; - - public StopDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public TimeValue getTimeout() { - return timeout; - } - - public StopDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) { - this.timeout = timeout; - return this; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - public StopDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public Boolean getForce() { - return force; - } - - public StopDataFrameAnalyticsRequest setForce(boolean force) { - this.force = force; - return this; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsRequest other = (StopDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) - && Objects.equals(timeout, other.timeout) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(force, other.force); - } - - @Override - public int hashCode() { - return Objects.hash(id, timeout, allowNoMatch, force); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java deleted file mode 100644 index 9c4dc1d67be5c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
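The StopDataFrameAnalyticsRequest removed above returns this from its setters, so call sites usually built it fluently; a sketch with a placeholder id:

    import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
    import org.elasticsearch.core.TimeValue;

    class StopAnalyticsExample {
        static StopDataFrameAnalyticsRequest example() {
            return new StopDataFrameAnalyticsRequest("my-analytics")
                .setAllowNoMatch(true)   // tolerate wildcard expressions matching nothing
                .setForce(false)
                .setTimeout(TimeValue.timeValueMinutes(1));
        }
    }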
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Data Frame Analytics is now stopped or not - */ -public class StopDataFrameAnalyticsResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_data_frame_analytics_response", - true, - args -> new StopDataFrameAnalyticsResponse((Boolean) args[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - public static StopDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean stopped; - - public StopDataFrameAnalyticsResponse(boolean stopped) { - this.stopped = stopped; - } - - /** - * Has the Data Frame Analytics stopped or not - * - * @return boolean value indicating the Data Frame Analytics stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsResponse other = (StopDataFrameAnalyticsResponse) o; - return stopped == other.stopped; - } - - @Override - public int hashCode() { - return Objects.hash(stopped); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field(STOPPED.getPreferredName(), stopped).endObject(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java deleted file mode 100644 index 9c62b0a4d2bf4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.security.InvalidParameterException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request to stop Machine Learning Datafeeds - */ -public class StopDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser<StopDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>( - "stop_datafeed_request", - a -> new StopDatafeedRequest((List<String>) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - DatafeedConfig.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(StopDatafeedRequest::setForce, FORCE); - PARSER.declareBoolean(StopDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_DATAFEEDS = "_all"; - - private final List<String> datafeedIds; - private TimeValue timeout; - private Boolean force; - private Boolean allowNoMatch; - - /** - * Explicitly stop all datafeeds - * - * @return a {@link StopDatafeedRequest} for all existing datafeeds - */ - public static StopDatafeedRequest stopAllDatafeedsRequest() { - return new StopDatafeedRequest(ALL_DATAFEEDS); - } - - StopDatafeedRequest(List<String> datafeedIds) { - if (datafeedIds.isEmpty()) { - throw new InvalidParameterException("datafeedIds must not be empty"); - } - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * Stop the specified Datafeeds via their unique datafeedIds - * - * @param datafeedIds must be non-null and non-empty and each datafeedId must be non-null - */ - public StopDatafeedRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - /** - * All the datafeedIds to be stopped - */ - public List<String> getDatafeedIds() { - return datafeedIds; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the stop request to complete before timing out. - * - * @param timeout Default value: 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public Boolean getForce() { - return force; - } - - /** - * Should the stopping be forced. - * - * Use this to forcefully stop a datafeed - * - * @param force When {@code true} forcefully stop the datafeed.
Defaults to {@code false} - */ - public void setForce(boolean force) { - this.force = force; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * This includes {@code _all} string. - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, timeout, force, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedRequest that = (StopDatafeedRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) - && Objects.equals(timeout, that.timeout) - && Objects.equals(force, that.force) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (force != null) { - builder.field(FORCE.getPreferredName(), force); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java deleted file mode 100644 index 864b9ea6d4127..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
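A sketch of the two ways the StopDatafeedRequest removed above was built, either naming datafeeds explicitly or via the _all wildcard helper (the feed ids are placeholders):

    import org.elasticsearch.client.ml.StopDatafeedRequest;

    class StopDatafeedExample {
        static StopDatafeedRequest explicit() {
            StopDatafeedRequest request = new StopDatafeedRequest("feed-1", "feed-2");
            request.setForce(false);
            request.setAllowNoMatch(true);
            return request;
        }

        static StopDatafeedRequest all() {
            return StopDatafeedRequest.stopAllDatafeedsRequest(); // expands to "_all"
        }
    }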
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now stopped or not - */ -public class StopDatafeedResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_datafeed_response", - true, - (a) -> new StopDatafeedResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - private final boolean stopped; - - public StopDatafeedResponse(boolean stopped) { - this.stopped = stopped; - } - - public static StopDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed stopped or not - * - * @return boolean value indicating the Datafeed stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedResponse that = (StopDatafeedResponse) other; - return isStopped() == that.isStopped(); - } - - @Override - public int hashCode() { - return Objects.hash(isStopped()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STOPPED.getPreferredName(), stopped); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java deleted file mode 100644 index f14d4b75687bd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; -import java.util.Optional; - -public class UpdateDataFrameAnalyticsRequest implements ToXContentObject, Validatable { - - private final DataFrameAnalyticsConfigUpdate update; - - public UpdateDataFrameAnalyticsRequest(DataFrameAnalyticsConfigUpdate update) { - this.update = update; - } - - public DataFrameAnalyticsConfigUpdate getUpdate() { - return update; - } - - @Override - public Optional validate() { - if (update == null) { - return Optional.of(ValidationException.withError("update requires a non-null data frame analytics config update")); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - UpdateDataFrameAnalyticsRequest other = (UpdateDataFrameAnalyticsRequest) o; - return Objects.equals(update, other.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java deleted file mode 100644 index d2de264c75411..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
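Note that the UpdateDataFrameAnalyticsRequest removed above defers null-checking to validate() instead of its constructor; a sketch of that behaviour (printing the error is illustrative only):

    import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest;

    class UpdateAnalyticsValidation {
        static void example() {
            UpdateDataFrameAnalyticsRequest bad = new UpdateDataFrameAnalyticsRequest(null);
            // validate() reports the missing config update rather than throwing eagerly.
            bad.validate().ifPresent(e -> System.out.println(e.getMessage()));
        }
    }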
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate} - * settings - */ -public class UpdateDatafeedRequest implements Validatable, ToXContentObject { - - private final DatafeedUpdate update; - - public UpdateDatafeedRequest(DatafeedUpdate update) { - this.update = update; - } - - public DatafeedUpdate getDatafeedUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateDatafeedRequest that = (UpdateDatafeedRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java deleted file mode 100644 index 3a76432715067..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collection; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * Updates an existing {@link MlFilter} configuration - */ -public class UpdateFilterRequest implements Validatable, ToXContentObject { - - public static final ParseField ADD_ITEMS = new ParseField("add_items"); - public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_filter_request", - (a) -> new UpdateFilterRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), MlFilter.ID); - PARSER.declareStringOrNull(UpdateFilterRequest::setDescription, MlFilter.DESCRIPTION); - PARSER.declareStringArray(UpdateFilterRequest::setAddItems, ADD_ITEMS); - PARSER.declareStringArray(UpdateFilterRequest::setRemoveItems, REMOVE_ITEMS); - } - - private String filterId; - private String description; - private SortedSet addItems; - private SortedSet removeItems; - - /** - * Construct a new request referencing a non-null, existing filter_id - * @param filterId Id referencing the filter to update - */ - public UpdateFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[" + MlFilter.ID.getPreferredName() + "] must not be null"); - } - - public String getFilterId() { - return filterId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the filter - * @param description the updated filter description - */ - public void setDescription(String description) { - this.description = description; - } - - public SortedSet getAddItems() { - return addItems; - } - - /** - * The collection of items to add to the filter - * @param addItems non-null items to add to the filter, defaults to empty array - */ - public void setAddItems(Collection addItems) { - this.addItems = new TreeSet<>(Objects.requireNonNull(addItems, "[" + ADD_ITEMS.getPreferredName() + "] must not be null")); - } - - public SortedSet getRemoveItems() { - return removeItems; - } - - /** - * The collection of items to remove from the filter - * @param removeItems non-null items to remove from the filter, defaults to empty array - */ - public void setRemoveItems(Collection removeItems) { - this.removeItems = new TreeSet<>(Objects.requireNonNull(removeItems, "[" + REMOVE_ITEMS.getPreferredName() + "] must not be null")); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MlFilter.ID.getPreferredName(), filterId); - if (description != null) { - builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); - } - if (addItems != null) { - builder.stringListField(ADD_ITEMS.getPreferredName(), addItems); - } - if (removeItems != null) { - builder.stringListField(REMOVE_ITEMS.getPreferredName(), removeItems); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(filterId, description, addItems, removeItems); - 
} - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UpdateFilterRequest other = (UpdateFilterRequest) obj; - return Objects.equals(filterId, other.filterId) - && Objects.equals(description, other.description) - && Objects.equals(addItems, other.addItems) - && Objects.equals(removeItems, other.removeItems); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java deleted file mode 100644 index aa46d5677c77d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.JobUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate} - * settings - */ -public class UpdateJobRequest implements Validatable, ToXContentObject { - - private final JobUpdate update; - - public UpdateJobRequest(JobUpdate update) { - this.update = update; - } - - public JobUpdate getJobUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateJobRequest that = (UpdateJobRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java deleted file mode 100644 index a06880369f6fa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
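A usage sketch for the UpdateFilterRequest removed above; the filter id, description and items are placeholder values:

    import java.util.Arrays;

    import org.elasticsearch.client.ml.UpdateFilterRequest;

    class UpdateFilterExample {
        static UpdateFilterRequest example() {
            UpdateFilterRequest request = new UpdateFilterRequest("safe_domains");
            request.setDescription("Updated list of safe domains");
            request.setAddItems(Arrays.asList("*.elastic.co"));
            request.setRemoveItems(Arrays.asList("old-site.example.com"));
            return request;
        }
    }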
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to update information about an existing model snapshot for a given job - */ -public class UpdateModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_request", - a -> new UpdateModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareStringOrNull(UpdateModelSnapshotRequest::setDescription, ModelSnapshot.DESCRIPTION); - PARSER.declareBoolean(UpdateModelSnapshotRequest::setRetain, ModelSnapshot.RETAIN); - } - - private final String jobId; - private String snapshotId; - private String description; - private Boolean retain; - - /** - * Constructs a request to update information for a snapshot of given job - * @param jobId id of the job from which to retrieve results - * @param snapshotId id of the snapshot from which to retrieve results - */ - public UpdateModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the snapshot. 
- * @param description the updated snapshot description - */ - public void setDescription(String description) { - this.description = description; - } - - public Boolean getRetain() { - return retain; - } - - /** - * The new value of the "retain" property of the snapshot - * @param retain the updated retain property - */ - public void setRetain(boolean retain) { - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (description != null) { - builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description); - } - if (retain != null) { - builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotRequest request = (UpdateModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(description, request.description) - && Objects.equals(retain, request.retain); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, description, retain); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java deleted file mode 100644 index 82b73fe260aa9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
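A sketch for the UpdateModelSnapshotRequest removed above; the ids and description are placeholders, and setRetain(true) marks the snapshot to be kept during automatic snapshot cleanup:

    import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;

    class UpdateSnapshotExample {
        static UpdateModelSnapshotRequest example() {
            UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest("my-job", "snap-1");
            request.setDescription("Snapshot taken before the January reindex");
            request.setRetain(true);
            return request;
        }
    }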
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the update of information for an existing model snapshot for a given job - */ -public class UpdateModelSnapshotResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField MODEL = new ParseField("model"); - - public UpdateModelSnapshotResponse(boolean acknowledged, ModelSnapshot.Builder modelSnapshot) { - this.acknowledged = acknowledged; - this.model = modelSnapshot.build(); - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_response", - true, - a -> new UpdateModelSnapshotResponse((Boolean) a[0], ((ModelSnapshot.Builder) a[1])) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static UpdateModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final ModelSnapshot model; - - /** - * Get the action acknowledgement - * @return a {@code boolean} that indicates whether the model snapshot was updated successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the updated snapshot of the model - * @return the updated model snapshot. - */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotResponse request = (UpdateModelSnapshotResponse) obj; - return Objects.equals(acknowledged, request.acknowledged) && Objects.equals(model, request.model); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java deleted file mode 100644 index cc1660ed4dc6b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_request", - true, - a -> new UpgradeJobModelSnapshotRequest((String) a[0], (String) a[1], (String) a[2], (Boolean) a[3]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TIMEOUT); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), WAIT_FOR_COMPLETION); - } - - private final String jobId; - private final String snapshotId; - private final TimeValue timeout; - private final Boolean waitForCompletion; - - UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, String timeout, Boolean waitForCompletion) { - this(jobId, snapshotId, timeout == null ? null : TimeValue.parseTimeValue(timeout, TIMEOUT.getPreferredName()), waitForCompletion); - } - - public UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, TimeValue timeValue, Boolean waitForCompletion) { - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - this.snapshotId = Objects.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); - this.timeout = timeValue; - this.waitForCompletion = waitForCompletion; - } - - public static UpgradeJobModelSnapshotRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public TimeValue getTimeout() { - return timeout; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpgradeJobModelSnapshotRequest request = (UpgradeJobModelSnapshotRequest) o; - return Objects.equals(jobId, request.jobId) - && Objects.equals(timeout, request.timeout) - && Objects.equals(waitForCompletion, request.waitForCompletion) - && Objects.equals(snapshotId, request.snapshotId); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, timeout, waitForCompletion); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (waitForCompletion != null) { - builder.field(WAIT_FOR_COMPLETION.getPreferredName(), 
waitForCompletion); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java deleted file mode 100644 index b260bbaa5d22b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotResponse implements ToXContentObject { - - private static final ParseField COMPLETED = new ParseField("completed"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_response", - true, - (a) -> new UpgradeJobModelSnapshotResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), COMPLETED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean completed; - private final String node; - - public UpgradeJobModelSnapshotResponse(Boolean opened, String node) { - this.completed = opened != null && opened; - this.node = node; - } - - public static UpgradeJobModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public boolean isCompleted() { - return completed; - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. 
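For reference, the deleted `UpgradeJobModelSnapshotRequest` above exposed a single public constructor; a minimal sketch of building one, assuming the removed high-level REST client classes are still on the classpath (the job ID, snapshot ID, and timeout are placeholder values):

```java
import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest;
import org.elasticsearch.core.TimeValue;

public class UpgradeSnapshotExample {
    public static void main(String[] args) {
        // Placeholder job and snapshot IDs; wait up to 30 minutes for the upgrade to finish.
        UpgradeJobModelSnapshotRequest request = new UpgradeJobModelSnapshotRequest(
            "my-job",
            "1575402236",
            TimeValue.timeValueMinutes(30),
            true
        );
        System.out.println(request.getJobId() + " / " + request.getSnapshotId());
    }
}
```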
- */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - UpgradeJobModelSnapshotResponse that = (UpgradeJobModelSnapshotResponse) other; - return completed == that.completed && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(completed, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(COMPLETED.getPreferredName(), completed); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java deleted file mode 100644 index da7c0ac2cac52..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.calendars; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * A simple calendar object for scheduled (special) events. - * The calendar consists of a name and a list of job Ids or job groups; - * the events are stored separately and reference the calendar. 
- */ -public class Calendar implements ToXContentObject { - - public static final String CALENDAR_TYPE = "calendar"; - - public static final ParseField JOB_IDS = new ParseField("job_ids"); - public static final ParseField ID = new ParseField("calendar_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - CALENDAR_TYPE, - true, - a -> new Calendar((String) a[0], (List) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), JOB_IDS); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION); - } - - private final String id; - private final List jobIds; - private final String description; - - /** - * {@code jobIds} can be a mix of job groups and job Ids - * @param id The calendar Id - * @param jobIds List of job Ids or job groups - * @param description An optional description - */ - public Calendar(String id, List jobIds, @Nullable String description) { - this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); - this.jobIds = Collections.unmodifiableList(Objects.requireNonNull(jobIds, JOB_IDS.getPreferredName() + " must not be null")); - this.description = description; - } - - public String getId() { - return id; - } - - public List getJobIds() { - return jobIds; - } - - @Nullable - public String getDescription() { - return description; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.stringListField(JOB_IDS.getPreferredName(), jobIds); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Calendar other = (Calendar) obj; - return id.equals(other.id) && jobIds.equals(other.jobIds) && Objects.equals(description, other.description); - } - - @Override - public int hashCode() { - return Objects.hash(id, jobIds, description); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java deleted file mode 100644 index 8aecc33d32b1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
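The deleted `Calendar` class above is a plain immutable holder, so constructing one is direct; a short sketch with hypothetical IDs (note that `job_ids` may mix individual job IDs and job groups, as the constructor Javadoc states):

```java
import org.elasticsearch.client.ml.calendars.Calendar;

import java.util.Arrays;

public class CalendarExample {
    public static void main(String[] args) {
        // "cpu-usage" is an individual job ID and "ops-jobs" a job group; both are hypothetical.
        Calendar calendar = new Calendar(
            "planned-outages",
            Arrays.asList("cpu-usage", "ops-jobs"),
            "Scheduled maintenance windows"
        );
        System.out.println(calendar.getId() + " covers " + calendar.getJobIds());
    }
}
```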
- */ -package org.elasticsearch.client.ml.calendars; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class ScheduledEvent implements ToXContentObject { - - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField START_TIME = new ParseField("start_time"); - public static final ParseField END_TIME = new ParseField("end_time"); - public static final ParseField EVENT_ID = new ParseField("event_id"); - public static final String SCHEDULED_EVENT_TYPE = "scheduled_event"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - SCHEDULED_EVENT_TYPE, - true, - a -> new ScheduledEvent((String) a[0], (Date) a[1], (Date) a[2], (String) a[3], (String) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, START_TIME.getPreferredName()), - START_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, END_TIME.getPreferredName()), - END_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EVENT_ID); - } - - private final String description; - private final Date startTime; - private final Date endTime; - private final String calendarId; - private final String eventId; - - ScheduledEvent(String description, Date startTime, Date endTime, String calendarId, @Nullable String eventId) { - this.description = Objects.requireNonNull(description); - this.startTime = Objects.requireNonNull(startTime); - this.endTime = Objects.requireNonNull(endTime); - this.calendarId = Objects.requireNonNull(calendarId); - this.eventId = eventId; - } - - public String getDescription() { - return description; - } - - public Date getStartTime() { - return startTime; - } - - public Date getEndTime() { - return endTime; - } - - public String getCalendarId() { - return calendarId; - } - - public String getEventId() { - return eventId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.timeField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.getTime()); - builder.timeField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.getTime()); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (eventId != null) { - builder.field(EVENT_ID.getPreferredName(), eventId); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ScheduledEvent other = (ScheduledEvent) obj; - return Objects.equals(this.description, other.description) - && Objects.equals(this.startTime, other.startTime) - && 
Objects.equals(this.endTime, other.endTime) - && Objects.equals(this.calendarId, other.calendarId); - } - - @Override - public int hashCode() { - return Objects.hash(description, startTime, endTime, calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java deleted file mode 100644 index 5f23f2478c070..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * The description of how searches should be chunked. - */ -public class ChunkingConfig implements ToXContentObject { - - public static final ParseField MODE_FIELD = new ParseField("mode"); - public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "chunking_config", - true, - a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Mode::fromString, MODE_FIELD); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, TIME_SPAN_FIELD.getPreferredName()), - TIME_SPAN_FIELD - ); - - } - - private final Mode mode; - private final TimeValue timeSpan; - - ChunkingConfig(Mode mode, @Nullable TimeValue timeSpan) { - this.mode = Objects.requireNonNull(mode, MODE_FIELD.getPreferredName()); - this.timeSpan = timeSpan; - } - - @Nullable - public TimeValue getTimeSpan() { - return timeSpan; - } - - Mode getMode() { - return mode; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODE_FIELD.getPreferredName(), mode); - if (timeSpan != null) { - builder.field(TIME_SPAN_FIELD.getPreferredName(), timeSpan.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(mode, timeSpan); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ChunkingConfig other = (ChunkingConfig) obj; - return Objects.equals(this.mode, other.mode) && Objects.equals(this.timeSpan, other.timeSpan); - } - - public static ChunkingConfig newAuto() { - return new ChunkingConfig(Mode.AUTO, null); - } - - public static ChunkingConfig newOff() { - return new ChunkingConfig(Mode.OFF, null); - } - - public static ChunkingConfig newManual(TimeValue timeSpan) { - return new ChunkingConfig(Mode.MANUAL, timeSpan); - } 
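The three factory methods just above are the only public way to obtain a `ChunkingConfig`; a brief sketch of each, again assuming the removed client classes are available:

```java
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.core.TimeValue;

public class ChunkingExample {
    public static void main(String[] args) {
        ChunkingConfig auto = ChunkingConfig.newAuto();   // datafeed picks the chunk size itself
        ChunkingConfig off = ChunkingConfig.newOff();     // chunking disabled
        ChunkingConfig manual = ChunkingConfig.newManual(TimeValue.timeValueHours(1)); // fixed 1h chunks
        // getTimeSpan() is null unless the mode is manual.
        System.out.println(auto.getTimeSpan() + " / " + off.getTimeSpan() + " / " + manual.getTimeSpan());
    }
}
```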
- - public enum Mode { - AUTO, - MANUAL, - OFF; - - public static Mode fromString(String value) { - return Mode.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java deleted file mode 100644 index e1363239f4e44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java +++ /dev/null @@ -1,491 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * The datafeed configuration object. It specifies which indices - * to get the data from and offers parameters for customizing different - * aspects of the process. 
- */ -public class DatafeedConfig implements ToXContentObject { - - public static final ParseField ID = new ParseField("datafeed_id"); - public static final ParseField QUERY_DELAY = new ParseField("query_delay"); - public static final ParseField FREQUENCY = new ParseField("frequency"); - public static final ParseField INDEXES = new ParseField("indexes"); - public static final ParseField INDICES = new ParseField("indices"); - public static final ParseField QUERY = new ParseField("query"); - public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); - public static final ParseField AGGREGATIONS = new ParseField("aggregations"); - public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); - public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config"); - public static final ParseField DELAYED_DATA_CHECK_CONFIG = new ParseField("delayed_data_check_config"); - public static final ParseField MAX_EMPTY_SEARCHES = new ParseField("max_empty_searches"); - public static final ParseField INDICES_OPTIONS = new ParseField("indices_options"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_config", - true, - a -> new Builder((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - - PARSER.declareStringArray(Builder::setIndices, INDEXES); - PARSER.declareStringArray(Builder::setIndices, INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), - QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), - FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedConfig::parseBytes, QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField(Builder::setAggregations, DatafeedConfig::parseBytes, AGGREGATIONS, ObjectParser.ValueType.OBJECT); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final String jobId; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private 
final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedConfig( - String id, - String jobId, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.jobId = jobId; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields); - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = Collections.unmodifiableMap(runtimeMappings); - } - - public String getId() { - return id; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? 
Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(Job.ID.getPreferredName(), jobId); - if (queryDelay != null) { - builder.field(QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - if (indices != null) { - builder.field(INDICES.getPreferredName(), indices); - } - if (query != null) { - builder.field(QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (scrollSize != null) { - builder.field(SCROLL_SIZE.getPreferredName(), scrollSize); - } - if (chunkingConfig != null) { - builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig); - } - if (delayedDataCheckConfig != null) { - builder.field(DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - if (maxEmptySearches != null) { - builder.field(MAX_EMPTY_SEARCHES.getPreferredName(), maxEmptySearches); - } - if (indicesOptions != null) { - builder.startObject(INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - if (runtimeMappings.isEmpty() == false) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - - builder.endObject(); - return builder; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. 
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedConfig that = (DatafeedConfig) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. - */ - @Override - public int hashCode() { - return Objects.hash( - id, - jobId, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id, String jobId) { - return new Builder(id, jobId); - } - - public static class Builder { - - private final String id; - private final String jobId; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings = Collections.emptyMap(); - - public Builder(String id, String jobId) { - this.id = Objects.requireNonNull(id, ID.getPreferredName()); - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public Builder(DatafeedConfig config) { - this.id = config.id; - this.jobId = config.jobId; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices == null ? null : new ArrayList<>(config.indices); - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields); - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.getDelayedDataCheckConfig(); - this.maxEmptySearches = config.getMaxEmptySearches(); - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = new HashMap<>(config.runtimeMappings); - } - - public Builder setIndices(List indices) { - this.indices = Objects.requireNonNull(indices, INDICES.getPreferredName()); - return this; - } - - public Builder setIndices(String... 
indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? null : xContentToBytes(aggregations); - return this; - } - - public Builder setScriptFields(List scriptFields) { - List sorted = new ArrayList<>(scriptFields); - sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - this.scriptFields = sorted; - return this; - } - - public Builder setScrollSize(int scrollSize) { - this.scrollSize = scrollSize; - return this; - } - - public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { - this.chunkingConfig = chunkingConfig; - return this; - } - - /** - * This sets the {@link DelayedDataCheckConfig} settings. - * - * See {@link DelayedDataCheckConfig} for more information. - * - * @param delayedDataCheckConfig the delayed data check configuration - * Default value is enabled, with `check_window` being null. This means the true window is - * calculated when the real-time Datafeed runs. 
- */ - public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) { - this.delayedDataCheckConfig = delayedDataCheckConfig; - return this; - } - - public Builder setMaxEmptySearches(int maxEmptySearches) { - this.maxEmptySearches = maxEmptySearches; - return this; - } - - public Builder setIndicesOptions(IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = Objects.requireNonNull(runtimeMappings, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName()); - return this; - } - - public DatafeedConfig build() { - return new DatafeedConfig( - id, - jobId, - queryDelay, - frequency, - indices, - query, - aggregations, - scriptFields, - scrollSize, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - private static BytesReference xContentToBytes(ToXContentObject object) throws IOException { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - object.toXContent(builder, ToXContentObject.EMPTY_PARAMS); - return BytesReference.bytes(builder); - } - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java deleted file mode 100644 index 4d309c31ab375..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -/** - * Datafeed State POJO - */ -public enum DatafeedState { - - STARTED, - STOPPED, - STARTING, - STOPPING; - - public static final ParseField STATE = new ParseField("state"); - - public static DatafeedState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java deleted file mode 100644 index b218f749a10f3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
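Putting the deleted `DatafeedConfig.Builder` shown earlier together, a minimal sketch of assembling a config; the feed ID, job ID, index pattern, and query JSON are all placeholder values:

```java
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.core.TimeValue;

public class DatafeedConfigExample {
    public static void main(String[] args) {
        DatafeedConfig config = DatafeedConfig.builder("events-feed", "events-job")
            .setIndices("events-*")                      // indices the datafeed reads from
            .setQuery("{\"match_all\": {}}")             // raw JSON, stored internally as a BytesReference
            .setQueryDelay(TimeValue.timeValueSeconds(60))
            .setFrequency(TimeValue.timeValueMinutes(5))
            .setScrollSize(1000)
            .setChunkingConfig(ChunkingConfig.newAuto())
            .build();
        System.out.println(config.getId() + " -> " + config.getIndices());
    }
}
```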
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -/** - * Datafeed Statistics POJO - */ -public class DatafeedStats implements ToXContentObject { - - private final String datafeedId; - private final DatafeedState datafeedState; - @Nullable - private final NodeAttributes node; - @Nullable - private final String assignmentExplanation; - @Nullable - private final DatafeedTimingStats timingStats; - - public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - public static final ParseField NODE = new ParseField("node"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("datafeed_stats", true, a -> { - String datafeedId = (String) a[0]; - DatafeedState datafeedState = DatafeedState.fromString((String) a[1]); - NodeAttributes nodeAttributes = (NodeAttributes) a[2]; - String assignmentExplanation = (String) a[3]; - DatafeedTimingStats timingStats = (DatafeedTimingStats) a[4]; - return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation, timingStats); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedTimingStats.PARSER, TIMING_STATS); - } - - public DatafeedStats( - String datafeedId, - DatafeedState datafeedState, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable DatafeedTimingStats timingStats - ) { - this.datafeedId = Objects.requireNonNull(datafeedId); - this.datafeedState = Objects.requireNonNull(datafeedState); - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.timingStats = timingStats; - } - - public String getDatafeedId() { - return datafeedId; - } - - public DatafeedState getDatafeedState() { - return datafeedState; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - public DatafeedTimingStats getDatafeedTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString()); - if (node != null) { - builder.startObject("node"); - builder.field("id", node.getId()); - builder.field("name", node.getName()); - builder.field("ephemeral_id", node.getEphemeralId()); - builder.field("transport_address", node.getTransportAddress()); - - builder.startObject("attributes"); - for (Map.Entry entry : node.getAttributes().entrySet()) { - if 
(entry.getKey().startsWith("ml.")) { - builder.field(entry.getKey(), entry.getValue()); - } - } - builder.endObject(); - builder.endObject(); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation, timingStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DatafeedStats other = (DatafeedStats) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(this.datafeedState, other.datafeedState) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.timingStats, other.timingStats); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java deleted file mode 100644 index 4aa464228f0e8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DatafeedTimingStats implements ToXContentObject { - - public static final ParseField JOB_ID = new ParseField("job_id"); - public static final ParseField SEARCH_COUNT = new ParseField("search_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_SEARCH_TIME_MS = new ParseField("total_search_time_ms"); - public static final ParseField AVG_SEARCH_TIME_PER_BUCKET_MS = new ParseField("average_search_time_per_bucket_ms"); - public static final ParseField EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS = new ParseField("exponential_average_search_time_per_hour_ms"); - - public static final ParseField TYPE = new ParseField("datafeed_timing_stats"); - - public static final ConstructingObjectParser PARSER = createParser(); - - @SuppressWarnings("HiddenField") - private static ConstructingObjectParser createParser() { - ConstructingObjectParser parser = new ConstructingObjectParser<>("datafeed_timing_stats", true, args -> { - String jobId = (String) args[0]; - Long searchCount = (Long) args[1]; - Long bucketCount = (Long) args[2]; - Double totalSearchTimeMs = (Double) args[3]; - Double avgSearchTimePerBucketMs = (Double) args[4]; - Double exponentialAvgSearchTimePerHourMs = (Double) args[5]; - return new DatafeedTimingStats( - jobId, - getOrDefault(searchCount, 0L), - getOrDefault(bucketCount, 0L), - getOrDefault(totalSearchTimeMs, 0.0), - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - }); - parser.declareString(constructorArg(), JOB_ID); - parser.declareLong(optionalConstructorArg(), SEARCH_COUNT); - parser.declareLong(optionalConstructorArg(), BUCKET_COUNT); - parser.declareDouble(optionalConstructorArg(), TOTAL_SEARCH_TIME_MS); - parser.declareDouble(optionalConstructorArg(), AVG_SEARCH_TIME_PER_BUCKET_MS); - parser.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS); - return parser; - } - - private final String jobId; - private long searchCount; - private long bucketCount; - private double totalSearchTimeMs; - private Double avgSearchTimePerBucketMs; - private Double exponentialAvgSearchTimePerHourMs; - - public DatafeedTimingStats( - String jobId, - long searchCount, - long bucketCount, - double totalSearchTimeMs, - @Nullable Double avgSearchTimePerBucketMs, - @Nullable Double exponentialAvgSearchTimePerHourMs - ) { - this.jobId = Objects.requireNonNull(jobId); - this.searchCount = searchCount; - this.bucketCount = bucketCount; - this.totalSearchTimeMs = totalSearchTimeMs; - this.avgSearchTimePerBucketMs = avgSearchTimePerBucketMs; - this.exponentialAvgSearchTimePerHourMs = exponentialAvgSearchTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getSearchCount() { - return searchCount; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalSearchTimeMs() { - return 
totalSearchTimeMs; - } - - public Double getAvgSearchTimePerBucketMs() { - return avgSearchTimePerBucketMs; - } - - public Double getExponentialAvgSearchTimePerHourMs() { - return exponentialAvgSearchTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(JOB_ID.getPreferredName(), jobId); - builder.field(SEARCH_COUNT.getPreferredName(), searchCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_SEARCH_TIME_MS.getPreferredName(), totalSearchTimeMs); - if (avgSearchTimePerBucketMs != null) { - builder.field(AVG_SEARCH_TIME_PER_BUCKET_MS.getPreferredName(), avgSearchTimePerBucketMs); - } - if (exponentialAvgSearchTimePerHourMs != null) { - builder.field(EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS.getPreferredName(), exponentialAvgSearchTimePerHourMs); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DatafeedTimingStats other = (DatafeedTimingStats) obj; - return Objects.equals(this.jobId, other.jobId) - && this.searchCount == other.searchCount - && this.bucketCount == other.bucketCount - && this.totalSearchTimeMs == other.totalSearchTimeMs - && Objects.equals(this.avgSearchTimePerBucketMs, other.avgSearchTimePerBucketMs) - && Objects.equals(this.exponentialAvgSearchTimePerHourMs, other.exponentialAvgSearchTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - searchCount, - bucketCount, - totalSearchTimeMs, - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java deleted file mode 100644 index 3b4be882a868b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * A datafeed update contains partial properties to update a {@link DatafeedConfig}. - * The main difference between this class and {@link DatafeedConfig} is that here all - * fields are nullable. - */ -public class DatafeedUpdate implements ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_update", - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), - DatafeedConfig.QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), - DatafeedConfig.FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedUpdate::parseBytes, DatafeedConfig.QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField( - Builder::setAggregations, - DatafeedUpdate::parseBytes, - DatafeedConfig.AGGREGATIONS, - ObjectParser.ValueType.OBJECT - ); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, DatafeedConfig.SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DatafeedConfig.DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, DatafeedConfig.MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - DatafeedConfig.INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference 
parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedUpdate( - String id, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices; - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields; - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = runtimeMappings; - } - - /** - * Get the id of the datafeed to update - */ - public String getId() { - return id; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), id); - if (queryDelay != null) { - builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - addOptionalField(builder, DatafeedConfig.INDICES, indices); - if (query != null) { - builder.field(DatafeedConfig.QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (delayedDataCheckConfig != null) { - builder.field(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize); - addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig); - addOptionalField(builder, DatafeedConfig.MAX_EMPTY_SEARCHES, maxEmptySearches); - if (indicesOptions != null) { - builder.startObject(DatafeedConfig.INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - addOptionalField(builder, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD, runtimeMappings); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - 
builder.field(field.getPreferredName(), value); - } - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedUpdate that = (DatafeedUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. 
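Because every field of `DatafeedUpdate` is nullable, an update object carries only the properties being changed and leaves the rest of the stored `DatafeedConfig` untouched; a minimal sketch with a hypothetical feed ID:

```java
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.core.TimeValue;

public class DatafeedUpdateExample {
    public static void main(String[] args) {
        // Only frequency and scroll size change; unset fields stay null and are not updated.
        DatafeedUpdate update = DatafeedUpdate.builder("events-feed")
            .setFrequency(TimeValue.timeValueMinutes(10))
            .setScrollSize(500)
            .build();
        System.out.println(update.getId());
    }
}
```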
- */ - @Override - public int hashCode() { - return Objects.hash( - id, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings; - - public Builder(String id) { - this.id = Objects.requireNonNull(id, DatafeedConfig.ID.getPreferredName()); - } - - public Builder(DatafeedUpdate config) { - this.id = config.id; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices; - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields; - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.delayedDataCheckConfig; - this.maxEmptySearches = config.maxEmptySearches; - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = config.runtimeMappings != null ? new HashMap<>(config.runtimeMappings) : null; - } - - public Builder setIndices(List indices) { - this.indices = indices; - return this; - } - - public Builder setIndices(String... indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? 
null : xContentToBytes(aggregations); - return this; - } - - public Builder setScriptFields(List scriptFields) { - List sorted = new ArrayList<>(scriptFields); - sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - this.scriptFields = sorted; - return this; - } - - public Builder setScrollSize(int scrollSize) { - this.scrollSize = scrollSize; - return this; - } - - public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { - this.chunkingConfig = chunkingConfig; - return this; - } - - public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) { - this.delayedDataCheckConfig = delayedDataCheckConfig; - return this; - } - - public Builder setMaxEmptySearches(int maxEmptySearches) { - this.maxEmptySearches = maxEmptySearches; - return this; - } - - public Builder setIndicesOptions(IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = runtimeMappings; - return this; - } - - public DatafeedUpdate build() { - return new DatafeedUpdate( - id, - queryDelay, - frequency, - indices, - query, - aggregations, - scriptFields, - scrollSize, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - private static BytesReference xContentToBytes(ToXContentObject object) throws IOException { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - object.toXContent(builder, ToXContentObject.EMPTY_PARAMS); - return BytesReference.bytes(builder); - } - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java deleted file mode 100644 index 4c55662f8b833..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * The configuration object containing the delayed data check settings. - * - * See {@link DelayedDataCheckConfig#enabledDelayedDataCheckConfig(TimeValue)} for creating a new - * enabled datacheck with the given check_window - * - * See {@link DelayedDataCheckConfig#disabledDelayedDataCheckConfig()} for creating a config for disabling - * delayed data checking. 
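 *
 * For example, a minimal usage sketch (editor's illustration, not part of the original
 * Javadoc; the two-hour window is an invented value, and only the two factory methods
 * declared in this class are called):
 *
 *   DelayedDataCheckConfig enabled =
 *       DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(2));
 *   DelayedDataCheckConfig disabled = DelayedDataCheckConfig.disabledDelayedDataCheckConfig();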
- */ -public class DelayedDataCheckConfig implements ToXContentObject { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField CHECK_WINDOW = new ParseField("check_window"); - - // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delayed_data_check_config", - true, - a -> new DelayedDataCheckConfig((Boolean) a[0], (TimeValue) a[1]) - ); - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, CHECK_WINDOW.getPreferredName()), - CHECK_WINDOW - ); - } - - /** - * This creates a new DelayedDataCheckConfig that has a check_window of the passed `timeValue` - * - * We query the index to the latest finalized bucket from this TimeValue in the past looking to see if any data has been indexed - * since the data was read with the Datafeed. - * - * The window must be larger than the {@link org.elasticsearch.client.ml.job.config.AnalysisConfig#bucketSpan}, less than - * 24 hours, and span less than 10,000x buckets. - * - * - * @param timeValue The time length in the past from the latest finalized bucket to look for latent data. - * If `null` is provided, the appropriate window is calculated when it is used - **/ - public static DelayedDataCheckConfig enabledDelayedDataCheckConfig(TimeValue timeValue) { - return new DelayedDataCheckConfig(true, timeValue); - } - - /** - * This creates a new DelayedDataCheckConfig that disables the data check. - */ - public static DelayedDataCheckConfig disabledDelayedDataCheckConfig() { - return new DelayedDataCheckConfig(false, null); - } - - private final boolean enabled; - private final TimeValue checkWindow; - - DelayedDataCheckConfig(Boolean enabled, TimeValue checkWindow) { - this.enabled = enabled; - this.checkWindow = checkWindow; - } - - public boolean isEnabled() { - return enabled; - } - - @Nullable - public TimeValue getCheckWindow() { - return checkWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED.getPreferredName(), enabled); - if (checkWindow != null) { - builder.field(CHECK_WINDOW.getPreferredName(), checkWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, checkWindow); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DelayedDataCheckConfig other = (DelayedDataCheckConfig) obj; - return Objects.equals(this.enabled, other.enabled) && Objects.equals(this.checkWindow, other.checkWindow); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java deleted file mode 100644 index b1fe4a5d1b87c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java +++ /dev/null @@ -1,550 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -public class Classification implements DataFrameAnalysis { - - public static Classification fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("classification"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); - static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Classification( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Integer) a[9], - (Long) a[10], - (ClassAssignmentObjective) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - 
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - ClassAssignmentObjective::fromString, - CLASS_ASSIGNMENT_OBJECTIVE - ); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (classification) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final ClassAssignmentObjective classAssignmentObjective; - private final Integer numTopClasses; - private final Long randomizeSeed; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Classification( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Integer numTopClasses, - @Nullable Long randomizeSeed, - @Nullable ClassAssignmentObjective classAssignmentObjective, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean 
earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.classAssignmentObjective = classAssignmentObjective; - this.numTopClasses = numTopClasses; - this.randomizeSeed = randomizeSeed; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public ClassAssignmentObjective getClassAssignmentObjective() { - return classAssignmentObjective; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnable() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if 
(randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - numTopClasses, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Classification that = (Classification) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(classAssignmentObjective, that.classAssignmentObjective) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum ClassAssignmentObjective { - MAXIMIZE_ACCURACY, - MAXIMIZE_MINIMUM_RECALL; - - public static ClassAssignmentObjective fromString(String 
value) { - return ClassAssignmentObjective.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Integer numTopClasses; - private Long randomizeSeed; - private ClassAssignmentObjective classAssignmentObjective; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setNumTopClasses(Integer numTopClasses) { - this.numTopClasses = numTopClasses; - return this; - } - - public Builder setClassAssignmentObjective(ClassAssignmentObjective classAssignmentObjective) { - this.classAssignmentObjective = classAssignmentObjective; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - 
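// Illustrative use of the Builder above (an editor's sketch; the dependent variable name and
// hyperparameter values are invented, and only methods defined in this class are called --
// see build() just below):
//
//   Classification classification = Classification.builder("my_dependent_variable")
//       .setEta(0.05)
//       .setMaxTrees(200)
//       .setNumTopClasses(3)
//       .setTrainingPercent(75.0)
//       .setEarlyStoppingEnabled(true)
//       .build();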
- public Classification build() { - return new Classification( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - numTopClasses, - randomizeSeed, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java deleted file mode 100644 index e7c13da72880a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.xcontent.ToXContentObject; - -public interface DataFrameAnalysis extends ToXContentObject { - - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java deleted file mode 100644 index 53c92d792fe20..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class DataFrameAnalyticsConfig implements ToXContentObject { - - public static DataFrameAnalyticsConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - static final ParseField ID = new ParseField("id"); - static final ParseField DESCRIPTION = new ParseField("description"); - static final ParseField SOURCE = new ParseField("source"); - static final ParseField DEST = new ParseField("dest"); - static final ParseField ANALYSIS = new ParseField("analysis"); - static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields"); - static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - static final ParseField CREATE_TIME = new ParseField("create_time"); - static final ParseField VERSION = new ParseField("version"); - static final ParseField ALLOW_LAZY_START = new ParseField("allow_lazy_start"); - static final ParseField MAX_NUM_THREADS = new ParseField("max_num_threads"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareObject(Builder::setSource, (p, c) -> DataFrameAnalyticsSource.fromXContent(p), SOURCE); - PARSER.declareObject(Builder::setDest, (p, c) -> DataFrameAnalyticsDest.fromXContent(p), DEST); - PARSER.declareObject(Builder::setAnalysis, (p, c) -> parseAnalysis(p), ANALYSIS); - PARSER.declareField( - Builder::setAnalyzedFields, - (p, c) -> FetchSourceContext.fromXContent(p), - ANALYZED_FIELDS, - ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), - MODEL_MEMORY_LIMIT, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setCreateTime, - p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareString(Builder::setVersion, Version::fromString, VERSION); - PARSER.declareBoolean(Builder::setAllowLazyStart, ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, MAX_NUM_THREADS); - } - - private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - DataFrameAnalysis analysis = parser.namedObject(DataFrameAnalysis.class, parser.currentName(), true); - 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return analysis; - } - - private final String id; - private final String description; - private final DataFrameAnalyticsSource source; - private final DataFrameAnalyticsDest dest; - private final DataFrameAnalysis analysis; - private final FetchSourceContext analyzedFields; - private final ByteSizeValue modelMemoryLimit; - private final Instant createTime; - private final Version version; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfig( - @Nullable String id, - @Nullable String description, - @Nullable DataFrameAnalyticsSource source, - @Nullable DataFrameAnalyticsDest dest, - @Nullable DataFrameAnalysis analysis, - @Nullable FetchSourceContext analyzedFields, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Instant createTime, - @Nullable Version version, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.source = source; - this.dest = dest; - this.analysis = analysis; - this.analyzedFields = analyzedFields; - this.modelMemoryLimit = modelMemoryLimit; - this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli()); - ; - this.version = version; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public DataFrameAnalyticsSource getSource() { - return source; - } - - public DataFrameAnalyticsDest getDest() { - return dest; - } - - public DataFrameAnalysis getAnalysis() { - return analysis; - } - - public FetchSourceContext getAnalyzedFields() { - return analyzedFields; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Instant getCreateTime() { - return createTime; - } - - public Version getVersion() { - return version; - } - - public Boolean getAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (id != null) { - builder.field(ID.getPreferredName(), id); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (source != null) { - builder.field(SOURCE.getPreferredName(), source); - } - if (dest != null) { - builder.field(DEST.getPreferredName(), dest); - } - if (analysis != null) { - builder.startObject(ANALYSIS.getPreferredName()).field(analysis.getName(), analysis).endObject(); - } - if (analyzedFields != null) { - builder.field(ANALYZED_FIELDS.getPreferredName(), analyzedFields); - } - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version); - } - if (allowLazyStart != null) { - builder.field(ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; 
- if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsConfig other = (DataFrameAnalyticsConfig) o; - return Objects.equals(id, other.id) - && Objects.equals(description, other.description) - && Objects.equals(source, other.source) - && Objects.equals(dest, other.dest) - && Objects.equals(analysis, other.analysis) - && Objects.equals(analyzedFields, other.analyzedFields) - && Objects.equals(modelMemoryLimit, other.modelMemoryLimit) - && Objects.equals(createTime, other.createTime) - && Objects.equals(version, other.version) - && Objects.equals(allowLazyStart, other.allowLazyStart) - && Objects.equals(maxNumThreads, other.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String id; - private String description; - private DataFrameAnalyticsSource source; - private DataFrameAnalyticsDest dest; - private DataFrameAnalysis analysis; - private FetchSourceContext analyzedFields; - private ByteSizeValue modelMemoryLimit; - private Instant createTime; - private Version version; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSource(DataFrameAnalyticsSource source) { - this.source = Objects.requireNonNull(source); - return this; - } - - public Builder setDest(DataFrameAnalyticsDest dest) { - this.dest = Objects.requireNonNull(dest); - return this; - } - - public Builder setAnalysis(DataFrameAnalysis analysis) { - this.analysis = Objects.requireNonNull(analysis); - return this; - } - - public Builder setAnalyzedFields(FetchSourceContext fields) { - this.analyzedFields = fields; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - Builder setVersion(Version version) { - this.version = version; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfig build() { - return new DataFrameAnalyticsConfig( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java deleted file mode 100644 index 4dccee1019ce1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ObjectParser.ValueType.VALUE; - -public class DataFrameAnalyticsConfigUpdate implements ToXContentObject { - - public static DataFrameAnalyticsConfigUpdate fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config_update", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, DataFrameAnalyticsConfig.ID); - PARSER.declareStringOrNull(Builder::setDescription, DataFrameAnalyticsConfig.DESCRIPTION); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName()), - DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT, - VALUE - ); - PARSER.declareBoolean(Builder::setAllowLazyStart, DataFrameAnalyticsConfig.ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, DataFrameAnalyticsConfig.MAX_NUM_THREADS); - } - - private final String id; - private final String description; - private final ByteSizeValue modelMemoryLimit; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfigUpdate( - String id, - @Nullable String description, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.modelMemoryLimit = modelMemoryLimit; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Boolean isAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DataFrameAnalyticsConfig.ID.getPreferredName(), id); - if (description != null) { - builder.field(DataFrameAnalyticsConfig.DESCRIPTION.getPreferredName(), description); - } - if (modelMemoryLimit != null) { - builder.field(DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (allowLazyStart != null) { - builder.field(DataFrameAnalyticsConfig.ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(DataFrameAnalyticsConfig.MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataFrameAnalyticsConfigUpdate == 
false) { - return false; - } - - DataFrameAnalyticsConfigUpdate that = (DataFrameAnalyticsConfigUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.description, that.description) - && Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.allowLazyStart, that.allowLazyStart) - && Objects.equals(this.maxNumThreads, that.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - - public static class Builder { - - private String id; - private String description; - private ByteSizeValue modelMemoryLimit; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public String getId() { - return id; - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfigUpdate build() { - return new DataFrameAnalyticsConfigUpdate(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java deleted file mode 100644 index fe576411f131b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
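// Illustrative use of DataFrameAnalyticsConfigUpdate above (an editor's sketch; the job id and
// values are invented, and only methods defined in that class are called):
//
//   DataFrameAnalyticsConfigUpdate update = DataFrameAnalyticsConfigUpdate.builder()
//       .setId("my_analytics_job")
//       .setDescription("updated description")
//       .setModelMemoryLimit(ByteSizeValue.parseBytesSizeValue("1gb", "model_memory_limit"))
//       .setAllowLazyStart(true)
//       .build();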
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -public class DataFrameAnalyticsDest implements ToXContentObject { - - public static DataFrameAnalyticsDest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField RESULTS_FIELD = new ParseField("results_field"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_dest", true, Builder::new); - - static { - PARSER.declareString(Builder::setIndex, INDEX); - PARSER.declareString(Builder::setResultsField, RESULTS_FIELD); - } - - private final String index; - private final String resultsField; - - private DataFrameAnalyticsDest(String index, @Nullable String resultsField) { - this.index = requireNonNull(index); - this.resultsField = resultsField; - } - - public String getIndex() { - return index; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsDest other = (DataFrameAnalyticsDest) o; - return Objects.equals(index, other.index) && Objects.equals(resultsField, other.resultsField); - } - - @Override - public int hashCode() { - return Objects.hash(index, resultsField); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String index; - private String resultsField; - - private Builder() {} - - public Builder setIndex(String index) { - this.index = index; - return this; - } - - public Builder setResultsField(String resultsField) { - this.resultsField = resultsField; - return this; - } - - public DataFrameAnalyticsDest build() { - return new DataFrameAnalyticsDest(index, resultsField); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java deleted file mode 100644 index da9cf7aa15b44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
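// Illustrative use of DataFrameAnalyticsDest above (an editor's sketch; the index and results
// field names are invented):
//
//   DataFrameAnalyticsDest dest = DataFrameAnalyticsDest.builder()
//       .setIndex("my-dest-index")
//       .setResultsField("ml")
//       .build();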
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -public class DataFrameAnalyticsSource implements ToXContentObject { - - public static DataFrameAnalyticsSource fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - public static final ParseField _SOURCE = new ParseField("_source"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_source", true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setIndex, INDEX); - PARSER.declareObject(Builder::setQueryConfig, (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareField( - Builder::setSourceFiltering, - (p, c) -> FetchSourceContext.fromXContent(p), - _SOURCE, - ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private final String[] index; - private final QueryConfig queryConfig; - private final FetchSourceContext sourceFiltering; - private final Map runtimeMappings; - - private DataFrameAnalyticsSource( - String[] index, - @Nullable QueryConfig queryConfig, - @Nullable FetchSourceContext sourceFiltering, - @Nullable Map runtimeMappings - ) { - this.index = Objects.requireNonNull(index); - this.queryConfig = queryConfig; - this.sourceFiltering = sourceFiltering; - this.runtimeMappings = runtimeMappings; - } - - public String[] getIndex() { - return index; - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public FetchSourceContext getSourceFiltering() { - return sourceFiltering; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - if (sourceFiltering != null) { - builder.field(_SOURCE.getPreferredName(), sourceFiltering); - } - if (runtimeMappings != null) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o; - return Arrays.equals(index, other.index) - && Objects.equals(queryConfig, other.queryConfig) - && Objects.equals(sourceFiltering, other.sourceFiltering) - && Objects.equals(runtimeMappings, other.runtimeMappings); - } - - @Override - public int hashCode() { - return 
Objects.hash(Arrays.asList(index), queryConfig, sourceFiltering, runtimeMappings); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String[] index; - private QueryConfig queryConfig; - private FetchSourceContext sourceFiltering; - private Map runtimeMappings; - - private Builder() {} - - public Builder setIndex(String... index) { - this.index = index; - return this; - } - - public Builder setIndex(List index) { - this.index = index.toArray(new String[0]); - return this; - } - - public Builder setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - return this; - } - - public Builder setSourceFiltering(FetchSourceContext sourceFiltering) { - this.sourceFiltering = sourceFiltering; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = runtimeMappings; - return this; - } - - public DataFrameAnalyticsSource build() { - return new DataFrameAnalyticsSource(index, queryConfig, sourceFiltering, runtimeMappings); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java deleted file mode 100644 index 157ebe614f761..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import java.util.Locale; - -public enum DataFrameAnalyticsState { - STARTED, - REINDEXING, - ANALYZING, - STOPPING, - STOPPED, - STARTING, - FAILED; - - public static DataFrameAnalyticsState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java deleted file mode 100644 index 75eb216aed402..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
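// Illustrative use of the two types above (an editor's sketch; the index name is invented, and
// only methods defined in those classes are called):
//
//   DataFrameAnalyticsSource source = DataFrameAnalyticsSource.builder()
//       .setIndex("my-source-index")
//       .build();
//   DataFrameAnalyticsState state = DataFrameAnalyticsState.fromString("stopped"); // -> STOPPED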
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.client.ml.dataframe.stats.common.DataCounts; -import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsage; -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DataFrameAnalyticsStats { - - public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - static final ParseField ID = new ParseField("id"); - static final ParseField STATE = new ParseField("state"); - static final ParseField FAILURE_REASON = new ParseField("failure_reason"); - static final ParseField PROGRESS = new ParseField("progress"); - static final ParseField DATA_COUNTS = new ParseField("data_counts"); - static final ParseField MEMORY_USAGE = new ParseField("memory_usage"); - static final ParseField ANALYSIS_STATS = new ParseField("analysis_stats"); - static final ParseField NODE = new ParseField("node"); - static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_analytics_stats", - true, - args -> new DataFrameAnalyticsStats( - (String) args[0], - (DataFrameAnalyticsState) args[1], - (String) args[2], - (List) args[3], - (DataCounts) args[4], - (MemoryUsage) args[5], - (AnalysisStats) args[6], - (NodeAttributes) args[7], - (String) args[8] - ) - ); - - static { - PARSER.declareString(constructorArg(), ID); - PARSER.declareString(constructorArg(), DataFrameAnalyticsState::fromString, STATE); - PARSER.declareString(optionalConstructorArg(), FAILURE_REASON); - PARSER.declareObjectArray(optionalConstructorArg(), PhaseProgress.PARSER, PROGRESS); - PARSER.declareObject(optionalConstructorArg(), DataCounts.PARSER, DATA_COUNTS); - PARSER.declareObject(optionalConstructorArg(), MemoryUsage.PARSER, MEMORY_USAGE); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAnalysisStats(p), ANALYSIS_STATS); - PARSER.declareObject(optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - } - - private static AnalysisStats parseAnalysisStats(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - AnalysisStats analysisStats = parser.namedObject(AnalysisStats.class, parser.currentName(), true); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return analysisStats; - } - - private final String id; - private final DataFrameAnalyticsState state; - private final String failureReason; - private final List progress; - 
private final DataCounts dataCounts; - private final MemoryUsage memoryUsage; - private final AnalysisStats analysisStats; - private final NodeAttributes node; - private final String assignmentExplanation; - - public DataFrameAnalyticsStats( - String id, - DataFrameAnalyticsState state, - @Nullable String failureReason, - @Nullable List progress, - @Nullable DataCounts dataCounts, - @Nullable MemoryUsage memoryUsage, - @Nullable AnalysisStats analysisStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation - ) { - this.id = id; - this.state = state; - this.failureReason = failureReason; - this.progress = progress; - this.dataCounts = dataCounts; - this.memoryUsage = memoryUsage; - this.analysisStats = analysisStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - } - - public String getId() { - return id; - } - - public DataFrameAnalyticsState getState() { - return state; - } - - public String getFailureReason() { - return failureReason; - } - - public List getProgress() { - return progress; - } - - @Nullable - public DataCounts getDataCounts() { - return dataCounts; - } - - @Nullable - public MemoryUsage getMemoryUsage() { - return memoryUsage; - } - - @Nullable - public AnalysisStats getAnalysisStats() { - return analysisStats; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsStats other = (DataFrameAnalyticsStats) o; - return Objects.equals(id, other.id) - && Objects.equals(state, other.state) - && Objects.equals(failureReason, other.failureReason) - && Objects.equals(progress, other.progress) - && Objects.equals(dataCounts, other.dataCounts) - && Objects.equals(memoryUsage, other.memoryUsage) - && Objects.equals(analysisStats, other.analysisStats) - && Objects.equals(node, other.node) - && Objects.equals(assignmentExplanation, other.assignmentExplanation); - } - - @Override - public int hashCode() { - return Objects.hash(id, state, failureReason, progress, dataCounts, memoryUsage, analysisStats, node, assignmentExplanation); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add("id", id) - .add("state", state) - .add("failureReason", failureReason) - .add("progress", progress) - .add("dataCounts", dataCounts) - .add("memoryUsage", memoryUsage) - .add("analysisStats", analysisStats) - .add("node", node) - .add("assignmentExplanation", assignmentExplanation) - .toString(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java deleted file mode 100644 index 562409b53df8d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
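A hypothetical usage sketch for the DataFrameAnalyticsStats class removed above: parsing one stats entry from an invented JSON body (the job id and progress values are made up, and the createParser overload shown is the long-standing registry/deprecation-handler variant):

    import org.elasticsearch.xcontent.DeprecationHandler;
    import org.elasticsearch.xcontent.NamedXContentRegistry;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentType;

    String json = "{\"id\":\"my-job\",\"state\":\"stopped\","
        + "\"progress\":[{\"phase\":\"reindexing\",\"progress_percent\":100}]}";
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        DataFrameAnalyticsStats stats = DataFrameAnalyticsStats.fromXContent(parser);
        assert "my-job".equals(stats.getId());   // id and state are required constructor args
        assert stats.getProgress().get(0).getProgressPercent() == 100;
    }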
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; - -import java.util.Arrays; -import java.util.List; - -public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, OutlierDetection.NAME, (p, c) -> OutlierDetection.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Regression.NAME, (p, c) -> Regression.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Classification.NAME, (p, c) -> Classification.fromXContent(p)) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java deleted file mode 100644 index d514de3558db6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class OutlierDetection implements DataFrameAnalysis { - - public static OutlierDetection fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static OutlierDetection createDefault() { - return builder().build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ParseField NAME = new ParseField("outlier_detection"); - static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), true, Builder::new); - - static { - PARSER.declareInt(Builder::setNNeighbors, N_NEIGHBORS); - PARSER.declareString(Builder::setMethod, Method::fromString, METHOD); - PARSER.declareDouble(Builder::setFeatureInfluenceThreshold, FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareBoolean(Builder::setComputeFeatureInfluence, COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(Builder::setOutlierFraction, OUTLIER_FRACTION); - PARSER.declareBoolean(Builder::setStandardizationEnabled, STANDARDIZATION_ENABLED); - } - - /** - * The number of neighbors. Leave unspecified for dynamic detection. 
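A sketch of how entries from a NamedXContentProvider like the one deleted above are typically folded into a registry; the lookup name "outlier_detection" matches OutlierDetection.NAME below:

    import org.elasticsearch.xcontent.NamedXContentRegistry;

    NamedXContentRegistry registry = new NamedXContentRegistry(
        new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
    // Parsers created with this registry can then resolve the polymorphic
    // analysis types by name, e.g.:
    //   parser.namedObject(DataFrameAnalysis.class, "outlier_detection", null)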
- */ - private final Integer nNeighbors; - - /** - * The method. Leave unspecified for a dynamic mixture of methods. - */ - private final Method method; - - /** - * The min outlier score required to calculate feature influence. Defaults to 0.1. - */ - private final Double featureInfluenceThreshold; - - /** - * Whether to compute feature influence or not. Defaults to true. - */ - private final Boolean computeFeatureInfluence; - - /** - * The proportion of data assumed to be outlying prior to outlier detection. Defaults to 0.05. - */ - private final Double outlierFraction; - - /** - * Whether to perform standardization. - */ - private final Boolean standardizationEnabled; - - private OutlierDetection( - Integer nNeighbors, - Method method, - Double featureInfluenceThreshold, - Boolean computeFeatureInfluence, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.computeFeatureInfluence = computeFeatureInfluence; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public Integer getNNeighbors() { - return nNeighbors; - } - - public Method getMethod() { - return method; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - OutlierDetection other = (OutlierDetection) o; - return Objects.equals(nNeighbors, other.nNeighbors) - && Objects.equals(method, other.method) - && Objects.equals(featureInfluenceThreshold, other.featureInfluenceThreshold) - && Objects.equals(computeFeatureInfluence, other.computeFeatureInfluence) - && Objects.equals(outlierFraction, other.outlierFraction) - && Objects.equals(standardizationEnabled, other.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum Method { - LOF, - LDOF, - DISTANCE_KTH_NN, - DISTANCE_KNN; - - public static Method fromString(String value) { - return 
Method.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - - private Integer nNeighbors; - private Method method; - private Double featureInfluenceThreshold; - private Boolean computeFeatureInfluence; - private Double outlierFraction; - private Boolean standardizationEnabled; - - private Builder() {} - - public Builder setNNeighbors(Integer nNeighborsValue) { - this.nNeighbors = nNeighborsValue; - return this; - } - - public Builder setMethod(Method method) { - this.method = method; - return this; - } - - public Builder setFeatureInfluenceThreshold(Double featureInfluenceThreshold) { - this.featureInfluenceThreshold = featureInfluenceThreshold; - return this; - } - - public Builder setComputeFeatureInfluence(Boolean computeFeatureInfluence) { - this.computeFeatureInfluence = computeFeatureInfluence; - return this; - } - - public Builder setOutlierFraction(Double outlierFraction) { - this.outlierFraction = outlierFraction; - return this; - } - - public Builder setStandardizationEnabled(Boolean standardizationEnabled) { - this.standardizationEnabled = standardizationEnabled; - return this; - } - - public OutlierDetection build() { - return new OutlierDetection( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java deleted file mode 100644 index f8d629586d2e7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
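An illustrative configuration of the OutlierDetection analysis removed above (the parameter values are arbitrary):

    OutlierDetection detection = OutlierDetection.builder()
        .setNNeighbors(5)
        .setMethod(OutlierDetection.Method.LOF)
        .setComputeFeatureInfluence(true)
        .setOutlierFraction(0.05)
        .build();
    // Method round-trips through lower case, so the JSON stays lower case while
    // the enum constants stay upper case: Method.fromString("lof") == Method.LOF
    // and Method.LOF.toString().equals("lof").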
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A class that describes a phase and its progress as a percentage - */ -public class PhaseProgress implements ToXContentObject { - - static final ParseField PHASE = new ParseField("phase"); - static final ParseField PROGRESS_PERCENT = new ParseField("progress_percent"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "phase_progress", - true, - a -> new PhaseProgress((String) a[0], (int) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), PROGRESS_PERCENT); - } - - private final String phase; - private final int progressPercent; - - public PhaseProgress(String phase, int progressPercent) { - this.phase = Objects.requireNonNull(phase); - this.progressPercent = progressPercent; - } - - public String getPhase() { - return phase; - } - - public int getProgressPercent() { - return progressPercent; - } - - @Override - public int hashCode() { - return Objects.hash(phase, progressPercent); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PhaseProgress that = (PhaseProgress) o; - return Objects.equals(phase, that.phase) && progressPercent == that.progressPercent; - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(PHASE.getPreferredName(), phase) - .add(PROGRESS_PERCENT.getPreferredName(), progressPercent) - .toString(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PhaseProgress.PHASE.getPreferredName(), phase); - builder.field(PhaseProgress.PROGRESS_PERCENT.getPreferredName(), progressPercent); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java deleted file mode 100644 index 73dd0d82b2221..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
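A minimal sketch of rendering the PhaseProgress class removed above to JSON (the phase name is invented):

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.xcontent.ToXContent;
    import org.elasticsearch.xcontent.XContentBuilder;
    import org.elasticsearch.xcontent.XContentFactory;

    XContentBuilder builder = XContentFactory.jsonBuilder();
    new PhaseProgress("analyzing", 42).toXContent(builder, ToXContent.EMPTY_PARAMS);
    String json = Strings.toString(builder); // {"phase":"analyzing","progress_percent":42}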
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -/** - * Object for encapsulating the desired Query for a DataFrameAnalysis - */ -public class QueryConfig implements ToXContentObject { - - public static QueryConfig fromXContent(XContentParser parser) throws IOException { - QueryBuilder query = AbstractQueryBuilder.parseInnerQueryBuilder(parser); - return new QueryConfig(query); - } - - private final QueryBuilder query; - - public QueryConfig(QueryBuilder query) { - this.query = requireNonNull(query); - } - - public QueryConfig(QueryConfig queryConfig) { - this(requireNonNull(queryConfig).query); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - query.toXContent(builder, params); - return builder; - } - - public QueryBuilder getQuery() { - return query; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - QueryConfig other = (QueryConfig) o; - return Objects.equals(query, other.query); - } - - @Override - public int hashCode() { - return Objects.hash(query); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java deleted file mode 100644 index 04f61d09305c2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java +++ /dev/null @@ -1,549 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
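A sketch of the QueryConfig wrapper removed above; its toXContent delegates straight to the wrapped query, so no extra object wrapper appears in the output (the field and value are invented):

    import org.elasticsearch.index.query.QueryBuilders;

    QueryConfig config = new QueryConfig(QueryBuilders.termQuery("user.id", "kimchy"));
    // Serializes as the query itself: {"term":{"user.id":{"value":"kimchy"}}}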
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Regression implements DataFrameAnalysis { - - public static Regression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("regression"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField LOSS_FUNCTION = new ParseField("loss_function"); - static final ParseField LOSS_FUNCTION_PARAMETER = new ParseField("loss_function_parameter"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Regression( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Long) a[9], - (LossFunction) a[10], - (Double) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - 
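// Ordering note (illustrative, not in the original file): with
// ConstructingObjectParser, the order of these declare* calls is what binds
// each parsed field to its a[0]..a[19] constructor slot above, so the
// declarations must stay in the same order as the casts in the lambda.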
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString(optionalConstructorArg(), LossFunction::fromString, LOSS_FUNCTION); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LOSS_FUNCTION_PARAMETER); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (regression) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final Long randomizeSeed; - private final LossFunction lossFunction; - private final Double lossFunctionParameter; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Regression( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Long randomizeSeed, - @Nullable LossFunction lossFunction, - @Nullable Double lossFunctionParameter, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - 
this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.randomizeSeed = randomizeSeed; - this.lossFunction = lossFunction; - this.lossFunctionParameter = lossFunctionParameter; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public LossFunction getLossFunction() { - return lossFunction; - } - - public Double getLossFunctionParameter() { - return lossFunctionParameter; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnabled() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if (randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (lossFunction != null) { - builder.field(LOSS_FUNCTION.getPreferredName(), lossFunction); - } - if (lossFunctionParameter != null) { - 
builder.field(LOSS_FUNCTION_PARAMETER.getPreferredName(), lossFunctionParameter); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Regression that = (Regression) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(lossFunction, that.lossFunction) - && Objects.equals(lossFunctionParameter, that.lossFunctionParameter) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Long randomizeSeed; - private LossFunction lossFunction; - private Double 
lossFunctionParameter; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setLossFunction(LossFunction lossFunction) { - this.lossFunction = lossFunction; - return this; - } - - public Builder setLossFunctionParameter(Double lossFunctionParameter) { - this.lossFunctionParameter = lossFunctionParameter; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - - public Regression build() { - return new Regression( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } - - public enum LossFunction { - MSE, - MSLE, - HUBER; - - private static LossFunction fromString(String value) { - return 
LossFunction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java deleted file mode 100644 index e5f3189a5920f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation - */ -public interface Evaluation extends ToXContentObject { - - /** - * Returns the evaluation name - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java deleted file mode 100644 index daa1051a92b9b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation metric - */ -public interface EvaluationMetric extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getName(); - - /** - * The result of an evaluation metric - */ - interface Result extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getMetricName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java deleted file mode 100644 index da1d66785f386..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
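An illustrative configuration of the Regression analysis removed above (the dependent variable and hyperparameter values are invented):

    Regression regression = Regression.builder("price")
        .setTrainingPercent(80.0)
        .setMaxTrees(500)
        .setLossFunction(Regression.LossFunction.HUBER)
        .setLossFunctionParameter(1.0)  // for huber this acts as the delta parameter
        .build();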
- */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AucRocMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.ConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.OutlierDetection; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.HuberMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredLogarithmicErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; - -public class MlEvaluationNamedXContentProvider implements NamedXContentProvider { - - /** - * Constructs the name under which a metric (or metric result) is registered. - * The name is prefixed with evaluation name so that registered names are unique. - * - * @param evaluationName name of the evaluation - * @param metricName name of the metric - * @return name appropriate for registering a metric (or metric result) in {@link NamedXContentRegistry} - */ - public static String registeredMetricName(String evaluationName, String metricName) { - return evaluationName + "." 
+ metricName; - } - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - // Evaluations - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(OutlierDetection.NAME), OutlierDetection::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Classification.NAME), Classification::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Regression.NAME), Regression::fromXContent), - // Evaluation metrics - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric::fromXContent - ), - // Evaluation metrics results - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - 
org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric.Result::fromXContent - ) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java deleted file mode 100644 index f95b8a0b77344..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
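A worked example of the registeredMetricName scheme defined above, which namespaces metric names by evaluation so that registry keys stay unique across evaluation types:

    String key = MlEvaluationNamedXContentProvider.registeredMetricName(
        OutlierDetection.NAME, ConfusionMatrixMetric.NAME);
    // key == "outlier_detection.confusion_matrix"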
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link AccuracyMetric} is a metric that answers the following two questions: - * - * 1. What is the fraction of documents for which predicted class equals the actual class? - * - * equation: overall_accuracy = 1/n * Σ(y == y') - * where: n = total number of documents - * y = document's actual class - * y' = document's predicted class - * - * 2. For any given class X, what is the fraction of documents for which either - * a) both actual and predicted class are equal to X (true positives) - * or - * b) both actual and predicted class are not equal to X (true negatives) - * - * equation: accuracy(X) = 1/n * (TP(X) + TN(X)) - * where: X = class being examined - * n = total number of documents - * TP(X) = number of true positives wrt X - * TN(X) = number of true negatives wrt X - */ -public class AccuracyMetric implements EvaluationMetric { - - public static final String NAME = "accuracy"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, AccuracyMetric::new); - - public static AccuracyMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public AccuracyMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField OVERALL_ACCURACY = new ParseField("overall_accuracy"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "accuracy_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), OVERALL_ACCURACY); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Fraction of documents for which predicted class equals the actual class. 
*/ - private final double overallAccuracy; - - public Result(List classes, double overallAccuracy) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.overallAccuracy = overallAccuracy; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getOverallAccuracy() { - return overallAccuracy; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(OVERALL_ACCURACY.getPreferredName(), overallAccuracy); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.overallAccuracy == that.overallAccuracy; - } - - @Override - public int hashCode() { - return Objects.hash(classes, overallAccuracy); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java deleted file mode 100644 index f8a85d7d665b7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Area under the curve (AUC) of the receiver operating characteristic (ROC). - * The ROC curve is a plot of the TPR (true positive rate) against - * the FPR (false positive rate) over a varying threshold. 
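A tiny worked example of the accuracy definitions documented above, with invented labels:

    import java.util.stream.IntStream;

    String[] actual    = {"cat", "cat", "dog"};
    String[] predicted = {"cat", "dog", "dog"};
    long matches = IntStream.range(0, actual.length)
        .filter(i -> actual[i].equals(predicted[i]))
        .count();
    double overallAccuracy = (double) matches / actual.length; // 2/3
    // Per class: accuracy("cat") = (TP=1 + TN=1) / 3 = 2/3, since document 1 is
    // a true positive and document 3 a true negative with respect to "cat".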
- */ -public class AucRocMetric implements EvaluationMetric { - - public static final String NAME = AucRocResult.NAME; - - public static final ParseField CLASS_NAME = new ParseField("class_name"); - public static final ParseField INCLUDE_CURVE = new ParseField("include_curve"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocMetric((String) args[0], (Boolean) args[1]) - ); - - static { - PARSER.declareString(constructorArg(), CLASS_NAME); - PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE); - } - - public static AucRocMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static AucRocMetric forClass(String className) { - return new AucRocMetric(className, false); - } - - public static AucRocMetric forClassWithCurve(String className) { - return new AucRocMetric(className, true); - } - - private final String className; - private final Boolean includeCurve; - - public AucRocMetric(String className, Boolean includeCurve) { - this.className = Objects.requireNonNull(className); - this.includeCurve = includeCurve; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), className); - if (includeCurve != null) { - builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocMetric that = (AucRocMetric) o; - return Objects.equals(className, that.className) && Objects.equals(includeCurve, that.includeCurve); - } - - @Override - public int hashCode() { - return Objects.hash(className, includeCurve); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java deleted file mode 100644 index 7f394ff30a046..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
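Illustrative use of the AucRocMetric factory methods removed above (the class name is invented):

    AucRocMetric metric = AucRocMetric.forClassWithCurve("dog");
    // Serializes as {"class_name":"dog","include_curve":true};
    // forClass("dog") would emit include_curve=false instead.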
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Evaluation of classification results. - */ -public class Classification implements Evaluation { - - public static final String NAME = "classification"; - - private static final ParseField ACTUAL_FIELD = new ParseField("actual_field"); - private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field"); - private static final ParseField TOP_CLASSES_FIELD = new ParseField("top_classes_field"); - - private static final ParseField METRICS = new ParseField("metrics"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Classification((String) a[0], (String) a[1], (String) a[2], (List) a[3]) - ); - - static { - PARSER.declareString(constructorArg(), ACTUAL_FIELD); - PARSER.declareString(optionalConstructorArg(), PREDICTED_FIELD); - PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_FIELD); - PARSER.declareNamedObjects( - optionalConstructorArg(), - (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c), - METRICS - ); - } - - public static Classification fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * The field containing the actual value - */ - private final String actualField; - - /** - * The field containing the predicted value - */ - private final String predictedField; - - /** - * The field containing the array of top classes - */ - private final String topClassesField; - - /** - * The list of metrics to calculate - */ - private final List metrics; - - public Classification(String actualField, String predictedField, String topClassesField) { - this(actualField, predictedField, topClassesField, (List) null); - } - - public Classification(String actualField, String predictedField, String topClassesField, EvaluationMetric... 
metrics) { - this(actualField, predictedField, topClassesField, Arrays.asList(metrics)); - } - - public Classification( - String actualField, - @Nullable String predictedField, - @Nullable String topClassesField, - @Nullable List metrics - ) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedField = predictedField; - this.topClassesField = topClassesField; - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - if (predictedField != null) { - builder.field(PREDICTED_FIELD.getPreferredName(), predictedField); - } - if (topClassesField != null) { - builder.field(TOP_CLASSES_FIELD.getPreferredName(), topClassesField); - } - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Classification that = (Classification) o; - return Objects.equals(that.actualField, this.actualField) - && Objects.equals(that.predictedField, this.predictedField) - && Objects.equals(that.topClassesField, this.topClassesField) - && Objects.equals(that.metrics, this.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedField, topClassesField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java deleted file mode 100644 index ae55246c11dc5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Calculates the multiclass confusion matrix. 
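For context: before this removal, the Classification container above and the metric classes deleted in this diff composed as below. A sketch with hypothetical field names; the constructors match the deleted sources (the confusion-matrix metric is the class removed just below).

```java
import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric;

public class ClassificationEvalSketch {

    // Builds an evaluation the way HLRC callers did; field names are hypothetical.
    static Classification buildEvaluation() {
        return new Classification(
            "animal_class",               // actual_field
            "ml.animal_class_prediction", // predicted_field
            "ml.top_classes",             // top_classes_field
            new PrecisionMetric(),
            new RecallMetric(),
            new MulticlassConfusionMatrixMetric(10)
        );
    }
}
```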
- */ -public class MulticlassConfusionMatrixMetric implements EvaluationMetric { - - public static final String NAME = "multiclass_confusion_matrix"; - - public static final ParseField SIZE = new ParseField("size"); - - private static final ConstructingObjectParser PARSER = createParser(); - - private static ConstructingObjectParser createParser() { - ConstructingObjectParser parser = new ConstructingObjectParser<>( - NAME, - true, - args -> new MulticlassConfusionMatrixMetric((Integer) args[0]) - ); - parser.declareInt(optionalConstructorArg(), SIZE); - return parser; - } - - public static MulticlassConfusionMatrixMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Integer size; - - public MulticlassConfusionMatrixMetric() { - this(null); - } - - public MulticlassConfusionMatrixMetric(@Nullable Integer size) { - this.size = size; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (size != null) { - builder.field(SIZE.getPreferredName(), size); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - MulticlassConfusionMatrixMetric that = (MulticlassConfusionMatrixMetric) o; - return Objects.equals(this.size, that.size); - } - - @Override - public int hashCode() { - return Objects.hash(size); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CONFUSION_MATRIX = new ParseField("confusion_matrix"); - private static final ParseField OTHER_ACTUAL_CLASS_COUNT = new ParseField("other_actual_class_count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_result", - true, - a -> new Result((List) a[0], (Long) a[1]) - ); - - static { - PARSER.declareObjectArray(optionalConstructorArg(), ActualClass.PARSER, CONFUSION_MATRIX); - PARSER.declareLong(optionalConstructorArg(), OTHER_ACTUAL_CLASS_COUNT); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List confusionMatrix; - private final Long otherActualClassCount; - - public Result(@Nullable List confusionMatrix, @Nullable Long otherActualClassCount) { - this.confusionMatrix = confusionMatrix != null ? 
Collections.unmodifiableList(Objects.requireNonNull(confusionMatrix)) : null; - this.otherActualClassCount = otherActualClassCount; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getConfusionMatrix() { - return confusionMatrix; - } - - public Long getOtherActualClassCount() { - return otherActualClassCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (confusionMatrix != null) { - builder.field(CONFUSION_MATRIX.getPreferredName(), confusionMatrix); - } - if (otherActualClassCount != null) { - builder.field(OTHER_ACTUAL_CLASS_COUNT.getPreferredName(), otherActualClassCount); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.confusionMatrix, that.confusionMatrix) - && Objects.equals(this.otherActualClassCount, that.otherActualClassCount); - } - - @Override - public int hashCode() { - return Objects.hash(confusionMatrix, otherActualClassCount); - } - } - - public static class ActualClass implements ToXContentObject { - - private static final ParseField ACTUAL_CLASS = new ParseField("actual_class"); - private static final ParseField ACTUAL_CLASS_DOC_COUNT = new ParseField("actual_class_doc_count"); - private static final ParseField PREDICTED_CLASSES = new ParseField("predicted_classes"); - private static final ParseField OTHER_PREDICTED_CLASS_DOC_COUNT = new ParseField("other_predicted_class_doc_count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_actual_class", - true, - a -> new ActualClass((String) a[0], (Long) a[1], (List) a[2], (Long) a[3]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), ACTUAL_CLASS); - PARSER.declareLong(optionalConstructorArg(), ACTUAL_CLASS_DOC_COUNT); - PARSER.declareObjectArray(optionalConstructorArg(), PredictedClass.PARSER, PREDICTED_CLASSES); - PARSER.declareLong(optionalConstructorArg(), OTHER_PREDICTED_CLASS_DOC_COUNT); - } - - private final String actualClass; - private final Long actualClassDocCount; - private final List predictedClasses; - private final Long otherPredictedClassDocCount; - - public ActualClass( - @Nullable String actualClass, - @Nullable Long actualClassDocCount, - @Nullable List predictedClasses, - @Nullable Long otherPredictedClassDocCount - ) { - this.actualClass = actualClass; - this.actualClassDocCount = actualClassDocCount; - this.predictedClasses = predictedClasses != null ? 
Collections.unmodifiableList(predictedClasses) : null; - this.otherPredictedClassDocCount = otherPredictedClassDocCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (actualClass != null) { - builder.field(ACTUAL_CLASS.getPreferredName(), actualClass); - } - if (actualClassDocCount != null) { - builder.field(ACTUAL_CLASS_DOC_COUNT.getPreferredName(), actualClassDocCount); - } - if (predictedClasses != null) { - builder.field(PREDICTED_CLASSES.getPreferredName(), predictedClasses); - } - if (otherPredictedClassDocCount != null) { - builder.field(OTHER_PREDICTED_CLASS_DOC_COUNT.getPreferredName(), otherPredictedClassDocCount); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ActualClass that = (ActualClass) o; - return Objects.equals(this.actualClass, that.actualClass) - && Objects.equals(this.actualClassDocCount, that.actualClassDocCount) - && Objects.equals(this.predictedClasses, that.predictedClasses) - && Objects.equals(this.otherPredictedClassDocCount, that.otherPredictedClassDocCount); - } - - @Override - public int hashCode() { - return Objects.hash(actualClass, actualClassDocCount, predictedClasses, otherPredictedClassDocCount); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - public static class PredictedClass implements ToXContentObject { - - private static final ParseField PREDICTED_CLASS = new ParseField("predicted_class"); - private static final ParseField COUNT = new ParseField("count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_predicted_class", - true, - a -> new PredictedClass((String) a[0], (Long) a[1]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), PREDICTED_CLASS); - PARSER.declareLong(optionalConstructorArg(), COUNT); - } - - private final String predictedClass; - private final Long count; - - public PredictedClass(@Nullable String predictedClass, @Nullable Long count) { - this.predictedClass = predictedClass; - this.count = count; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (predictedClass != null) { - builder.field(PREDICTED_CLASS.getPreferredName(), predictedClass); - } - if (count != null) { - builder.field(COUNT.getPreferredName(), count); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PredictedClass that = (PredictedClass) o; - return Objects.equals(this.predictedClass, that.predictedClass) && Objects.equals(this.count, that.count); - } - - @Override - public int hashCode() { - return Objects.hash(predictedClass, count); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java deleted file mode 100644 index 703468b5ec282..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java +++ /dev/null @@ -1,72 +0,0 @@ 
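Note for orientation: the matrix this metric reported is actual class × predicted class document counts, with the other_actual_class_count / other_predicted_class_doc_count buckets catching classes beyond the requested size. A toy sketch of the raw counts in plain Java; the real computation is a server-side aggregation.

```java
import java.util.LinkedHashMap;
import java.util.Map;

public final class ConfusionCounts {

    // actual class -> (predicted class -> doc count)
    static Map<String, Map<String, Long>> count(String[] actual, String[] predicted) {
        Map<String, Map<String, Long>> matrix = new LinkedHashMap<>();
        for (int i = 0; i < actual.length; i++) {
            matrix.computeIfAbsent(actual[i], k -> new LinkedHashMap<>())
                .merge(predicted[i], 1L, Long::sum);
        }
        return matrix;
    }
}
```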
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class PerClassSingleValue implements ToXContentObject {
-    private static final ParseField CLASS_NAME = new ParseField("class_name");
-    private static final ParseField VALUE = new ParseField("value");
-
-    public static final ConstructingObjectParser<PerClassSingleValue, Void> PARSER = new ConstructingObjectParser<>(
-        "per_class_result",
-        true,
-        a -> new PerClassSingleValue((String) a[0], (double) a[1])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), CLASS_NAME);
-        PARSER.declareDouble(constructorArg(), VALUE);
-    }
-
-    private final String className;
-    private final double value;
-
-    public PerClassSingleValue(String className, double value) {
-        this.className = Objects.requireNonNull(className);
-        this.value = value;
-    }
-
-    public String getClassName() {
-        return className;
-    }
-
-    public double getValue() {
-        return value;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(CLASS_NAME.getPreferredName(), className);
-        builder.field(VALUE.getPreferredName(), value);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        PerClassSingleValue that = (PerClassSingleValue) o;
-        return Objects.equals(this.className, that.className) && this.value == that.value;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(className, value);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
deleted file mode 100644
index 168eeed66d67d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
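Aside: the per-class holder just removed pairs with result classes (precision and recall below) that report both a per-class list and an average. A sketch of that summary, assuming an unweighted mean of the per-class values:

```java
import java.util.List;

import org.elasticsearch.client.ml.dataframe.evaluation.classification.PerClassSingleValue;

public final class PerClassAverage {

    // Unweighted mean; the assumed semantics of the avg_precision / avg_recall fields.
    static double average(List<PerClassSingleValue> classes) {
        return classes.stream()
            .mapToDouble(PerClassSingleValue::getValue)
            .average()
            .orElse(Double.NaN);
    }
}
```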
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link PrecisionMetric} is a metric that answers the question: - * "What fraction of documents classified as X actually belongs to X?" - * for any given class X - * - * equation: precision(X) = TP(X) / (TP(X) + FP(X)) - * where: TP(X) - number of true positives wrt X - * FP(X) - number of false positives wrt X - */ -public class PrecisionMetric implements EvaluationMetric { - - public static final String NAME = "precision"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, PrecisionMetric::new); - - public static PrecisionMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public PrecisionMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField AVG_PRECISION = new ParseField("avg_precision"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "precision_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_PRECISION); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Average of per-class precisions. 
*/ - private final double avgPrecision; - - public Result(List classes, double avgPrecision) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.avgPrecision = avgPrecision; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getAvgPrecision() { - return avgPrecision; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(AVG_PRECISION.getPreferredName(), avgPrecision); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.avgPrecision == that.avgPrecision; - } - - @Override - public int hashCode() { - return Objects.hash(classes, avgPrecision); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java deleted file mode 100644 index 689d441944e7a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link RecallMetric} is a metric that answers the question: - * "What fraction of documents belonging to X have been predicted as X by the classifier?" 
- * for any given class X - * - * equation: recall(X) = TP(X) / (TP(X) + FN(X)) - * where: TP(X) - number of true positives wrt X - * FN(X) - number of false negatives wrt X - */ -public class RecallMetric implements EvaluationMetric { - - public static final String NAME = "recall"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, RecallMetric::new); - - public static RecallMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public RecallMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField AVG_RECALL = new ParseField("avg_recall"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "recall_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_RECALL); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Average of per-class recalls. */ - private final double avgRecall; - - public Result(List classes, double avgRecall) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.avgRecall = avgRecall; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getAvgRecall() { - return avgRecall; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(AVG_RECALL.getPreferredName(), avgRecall); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.avgRecall == that.avgRecall; - } - - @Override - public int hashCode() { - return Objects.hash(classes, avgRecall); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java deleted file mode 100644 index 08e5122181269..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
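Aside: the two javadoc equations above reduce to a few lines each. The zero-denominator convention (returning 0.0) is an assumption of the sketch, not taken from the removed code.

```java
public final class PerClassPrecisionRecall {

    // precision(X) = TP(X) / (TP(X) + FP(X))
    static double precision(long tp, long fp) {
        return tp + fp == 0 ? 0.0 : (double) tp / (tp + fp);
    }

    // recall(X) = TP(X) / (TP(X) + FN(X))
    static double recall(long tp, long fn) {
        return tp + fn == 0 ? 0.0 : (double) tp / (tp + fn);
    }
}
```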
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.common; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class AucRocPoint implements ToXContentObject { - - public static AucRocPoint fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField TPR = new ParseField("tpr"); - private static final ParseField FPR = new ParseField("fpr"); - private static final ParseField THRESHOLD = new ParseField("threshold"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "auc_roc_point", - true, - args -> new AucRocPoint((double) args[0], (double) args[1], (double) args[2]) - ); - - static { - PARSER.declareDouble(constructorArg(), TPR); - PARSER.declareDouble(constructorArg(), FPR); - PARSER.declareDouble(constructorArg(), THRESHOLD); - } - - private final double tpr; - private final double fpr; - private final double threshold; - - public AucRocPoint(double tpr, double fpr, double threshold) { - this.tpr = tpr; - this.fpr = fpr; - this.threshold = threshold; - } - - public double getTruePositiveRate() { - return tpr; - } - - public double getFalsePositiveRate() { - return fpr; - } - - public double getThreshold() { - return threshold; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() - .field(TPR.getPreferredName(), tpr) - .field(FPR.getPreferredName(), fpr) - .field(THRESHOLD.getPreferredName(), threshold) - .endObject(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocPoint that = (AucRocPoint) o; - return tpr == that.tpr && fpr == that.fpr && threshold == that.threshold; - } - - @Override - public int hashCode() { - return Objects.hash(tpr, fpr, threshold); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java deleted file mode 100644 index d661115b67291..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.common; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class AucRocResult implements EvaluationMetric.Result { - - public static final String NAME = "auc_roc"; - - public static AucRocResult fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField VALUE = new ParseField("value"); - private static final ParseField CURVE = new ParseField("curve"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocResult((double) args[0], (List) args[1]) - ); - - static { - PARSER.declareDouble(constructorArg(), VALUE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AucRocPoint.fromXContent(p), CURVE); - } - - private final double value; - private final List curve; - - public AucRocResult(double value, @Nullable List curve) { - this.value = value; - this.curve = curve; - } - - @Override - public String getMetricName() { - return NAME; - } - - public double getValue() { - return value; - } - - public List getCurve() { - return curve == null ? null : Collections.unmodifiableList(curve); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(VALUE.getPreferredName(), value); - if (curve != null && curve.isEmpty() == false) { - builder.field(CURVE.getPreferredName(), curve); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocResult that = (AucRocResult) o; - return value == that.value && Objects.equals(curve, that.curve); - } - - @Override - public int hashCode() { - return Objects.hash(value, curve); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java deleted file mode 100644 index e39af0d143c4b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-abstract class AbstractConfusionMatrixMetric implements EvaluationMetric {
-
-    protected static final ParseField AT = new ParseField("at");
-
-    protected final double[] thresholds;
-
-    protected AbstractConfusionMatrixMetric(List<Double> at) {
-        this.thresholds = Objects.requireNonNull(at).stream().mapToDouble(Double::doubleValue).toArray();
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        return builder.startObject().field(AT.getPreferredName(), thresholds).endObject();
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java
deleted file mode 100644
index 7c8ea07ab6c77..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Area under the curve (AUC) of the receiver operating characteristic (ROC).
- * The ROC curve is a plot of the TPR (true positive rate) against
- * the FPR (false positive rate) over a varying threshold.
- */ -public class AucRocMetric implements EvaluationMetric { - - public static final String NAME = AucRocResult.NAME; - - public static final ParseField INCLUDE_CURVE = new ParseField("include_curve"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocMetric((Boolean) args[0]) - ); - - static { - PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE); - } - - public static AucRocMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static AucRocMetric withCurve() { - return new AucRocMetric(true); - } - - private final Boolean includeCurve; - - public AucRocMetric(Boolean includeCurve) { - this.includeCurve = includeCurve; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (includeCurve != null) { - builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocMetric that = (AucRocMetric) o; - return Objects.equals(includeCurve, that.includeCurve); - } - - @Override - public int hashCode() { - return Objects.hash(includeCurve); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java deleted file mode 100644 index 0d4617baeb56f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
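Aside: the confusion-matrix metric deleted next reports tp/fp/tn/fn at each of the `at` thresholds collected by AbstractConfusionMatrixMetric above. A sketch of how one threshold yields those counts, assuming a document scoring at or above the threshold is treated as an outlier (that decision rule is the sketch's assumption):

```java
public final class ThresholdedCounts {

    record Counts(long tp, long fp, long tn, long fn) {}

    // Applies a single "at" threshold to predicted probabilities.
    static Counts at(boolean[] actual, double[] probability, double threshold) {
        long tp = 0, fp = 0, tn = 0, fn = 0;
        for (int i = 0; i < actual.length; i++) {
            boolean predicted = probability[i] >= threshold; // assumed decision rule
            if (predicted && actual[i]) tp++;
            else if (predicted) fp++;
            else if (actual[i]) fn++;
            else tn++;
        }
        return new Counts(tp, fp, tn, fn);
    }
}
```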
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class ConfusionMatrixMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "confusion_matrix"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new ConfusionMatrixMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static ConfusionMatrixMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static ConfusionMatrixMetric at(Double... at) { - return new ConfusionMatrixMetric(Arrays.asList(at)); - } - - public ConfusionMatrixMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ConfusionMatrixMetric that = (ConfusionMatrixMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, ConfusionMatrix::fromXContent)); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public ConfusionMatrix getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - public static final class ConfusionMatrix implements ToXContentObject { - - public static ConfusionMatrix fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField TP = new ParseField("tp"); - private static final ParseField FP = new ParseField("fp"); - private static final ParseField TN = new ParseField("tn"); - private static final ParseField FN = new ParseField("fn"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "confusion_matrix", - true, - args -> new ConfusionMatrix((long) args[0], 
(long) args[1], (long) args[2], (long) args[3]) - ); - - static { - PARSER.declareLong(constructorArg(), TP); - PARSER.declareLong(constructorArg(), FP); - PARSER.declareLong(constructorArg(), TN); - PARSER.declareLong(constructorArg(), FN); - } - - private final long tp; - private final long fp; - private final long tn; - private final long fn; - - public ConfusionMatrix(long tp, long fp, long tn, long fn) { - this.tp = tp; - this.fp = fp; - this.tn = tn; - this.fn = fn; - } - - public long getTruePositives() { - return tp; - } - - public long getFalsePositives() { - return fp; - } - - public long getTrueNegatives() { - return tn; - } - - public long getFalseNegatives() { - return fn; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() - .field(TP.getPreferredName(), tp) - .field(FP.getPreferredName(), fp) - .field(TN.getPreferredName(), tn) - .field(FN.getPreferredName(), fn) - .endObject(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ConfusionMatrix that = (ConfusionMatrix) o; - return tp == that.tp && fp == that.fp && tn == that.tn && fn == that.fn; - } - - @Override - public int hashCode() { - return Objects.hash(tp, fp, tn, fn); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java deleted file mode 100644 index 7372e85d0bf05..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Evaluation of outlier detection results. 
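For context: as with classification, the removed outlier-detection evaluation composed as below. Field names are hypothetical; the constructor and the at(...) / withCurve() factories match the sources deleted in this diff.

```java
import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric;

public class OutlierDetectionEvalSketch {

    static OutlierDetection buildEvaluation() {
        return new OutlierDetection(
            "is_outlier",       // actual_field, holding 1/0 or true/false
            "ml.outlier_score", // predicted_probability_field in [0.0, 1.0]
            PrecisionMetric.at(0.25, 0.5, 0.75),
            RecallMetric.at(0.25, 0.5, 0.75),
            ConfusionMatrixMetric.at(0.5),
            AucRocMetric.withCurve()
        );
    }
}
```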
- */ -public class OutlierDetection implements Evaluation { - - public static final String NAME = "outlier_detection"; - - private static final ParseField ACTUAL_FIELD = new ParseField("actual_field"); - private static final ParseField PREDICTED_PROBABILITY_FIELD = new ParseField("predicted_probability_field"); - private static final ParseField METRICS = new ParseField("metrics"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new OutlierDetection((String) args[0], (String) args[1], (List) args[2]) - ); - - static { - PARSER.declareString(constructorArg(), ACTUAL_FIELD); - PARSER.declareString(constructorArg(), PREDICTED_PROBABILITY_FIELD); - PARSER.declareNamedObjects( - optionalConstructorArg(), - (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), null), - METRICS - ); - } - - public static OutlierDetection fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * The field where the actual class is marked up. - * The value of this field is assumed to either be 1 or 0, or true or false. - */ - private final String actualField; - - /** - * The field of the predicted probability in [0.0, 1.0]. - */ - private final String predictedProbabilityField; - - /** - * The list of metrics to calculate - */ - private final List metrics; - - public OutlierDetection(String actualField, String predictedField) { - this(actualField, predictedField, (List) null); - } - - public OutlierDetection(String actualField, String predictedProbabilityField, EvaluationMetric... metric) { - this(actualField, predictedProbabilityField, Arrays.asList(metric)); - } - - public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List metrics) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedProbabilityField = Objects.requireNonNull(predictedProbabilityField); - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), predictedProbabilityField); - - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OutlierDetection that = (OutlierDetection) o; - return Objects.equals(actualField, that.actualField) - && Objects.equals(predictedProbabilityField, that.predictedProbabilityField) - && Objects.equals(metrics, that.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedProbabilityField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java deleted file mode 100644 index 4f992615d79af..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class PrecisionMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "precision"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new PrecisionMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static PrecisionMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static PrecisionMetric at(Double... at) { - return new PrecisionMetric(Arrays.asList(at)); - } - - public PrecisionMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PrecisionMetric that = (PrecisionMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue())); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public Double getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java deleted file mode 100644 index 
531c62f825722..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class RecallMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "recall"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new RecallMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static RecallMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static RecallMetric at(Double... at) { - return new RecallMetric(Arrays.asList(at)); - } - - public RecallMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RecallMetric that = (RecallMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue())); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public Double getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java deleted file mode 100644 index 
038c659324da4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.regression; - -import org.elasticsearch.client.ml.dataframe.Regression.LossFunction; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Calculates the pseudo Huber loss function. - * - * equation: huber = 1/n * Σ(δ^2 * sqrt(1 + a^2 / δ^2) - 1) - * where: a = y - y´ - * δ - parameter that controls the steepness - */ -public class HuberMetric implements EvaluationMetric { - - public static final String NAME = LossFunction.HUBER.toString(); - - public static final ParseField DELTA = new ParseField("delta"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new HuberMetric((Double) args[0]) - ); - - static { - PARSER.declareDouble(optionalConstructorArg(), DELTA); - } - - public static HuberMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Double delta; - - public HuberMetric(@Nullable Double delta) { - this.delta = delta; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (delta != null) { - builder.field(DELTA.getPreferredName(), delta); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - HuberMetric that = (HuberMetric) o; - return Objects.equals(this.delta, that.delta); - } - - @Override - public int hashCode() { - return Objects.hash(delta); - } - - public static class Result implements EvaluationMetric.Result { - - public static final ParseField VALUE = new ParseField("value"); - private final double value; - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME + "_result", - true, - args -> new Result((double) args[0]) - ); - - static { - PARSER.declareDouble(constructorArg(), VALUE); - } - - public Result(double value) { - this.value = value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(VALUE.getPreferredName(), value); - builder.endObject(); - return builder; - } - - public double getValue() { - return value; - } - - 
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
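
Reviewer sketch (not part of the deleted source): the pseudo-Huber equation in the javadoc above, as a standalone illustration with a hypothetical class name and invented sample values:

```java
// huber = 1/n * Σ(δ² * (sqrt(1 + a²/δ²) - 1)), with a = y - y´.
public class PseudoHuberSketch {
    static double pseudoHuber(double[] actual, double[] predicted, double delta) {
        double sum = 0.0;
        for (int i = 0; i < actual.length; i++) {
            double a = actual[i] - predicted[i];
            // near-quadratic for small residuals, near-linear for large ones
            sum += delta * delta * (Math.sqrt(1 + (a * a) / (delta * delta)) - 1);
        }
        return sum / actual.length;
    }

    public static void main(String[] args) {
        System.out.println(pseudoHuber(new double[] { 1.0, 2.0 }, new double[] { 1.1, 4.0 }, 1.0));
    }
}
```
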
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java
deleted file mode 100644
index 4c593dc75db4e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Calculates the mean squared error between two known numerical fields.
- *
- * equation: mse = 1/n * Σ(y - y´)^2
- */
-public class MeanSquaredErrorMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.MSE.toString();
-
-    private static final ObjectParser<MeanSquaredErrorMetric, Void> PARSER = new ObjectParser<>(NAME, true, MeanSquaredErrorMetric::new);
-
-    public static MeanSquaredErrorMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public MeanSquaredErrorMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // create static hash code from name as there are currently no unique fields per class instance
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
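
Reviewer sketch (not part of the deleted source): the MSE formula from the javadoc above, as a standalone illustration with invented values:

```java
// mse = 1/n * Σ(y - y´)².
public class MseSketch {
    static double mse(double[] actual, double[] predicted) {
        double sum = 0.0;
        for (int i = 0; i < actual.length; i++) {
            double diff = actual[i] - predicted[i];
            sum += diff * diff;
        }
        return sum / actual.length;
    }

    public static void main(String[] args) {
        System.out.println(mse(new double[] { 3.0, 5.0 }, new double[] { 2.0, 7.0 })); // (1 + 4) / 2 = 2.5
    }
}
```
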
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java
deleted file mode 100644
index 676ee74cb3f83..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the mean squared logarithmic error between two known numerical fields.
- *
- * equation: msle = 1/n * Σ(log(y + offset) - log(y´ + offset))^2
- * where offset is used to make sure the argument to log function is always positive
- */
-public class MeanSquaredLogarithmicErrorMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.MSLE.toString();
-
-    public static final ParseField OFFSET = new ParseField("offset");
-
-    private static final ConstructingObjectParser<MeanSquaredLogarithmicErrorMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new MeanSquaredLogarithmicErrorMetric((Double) args[0])
-    );
-
-    static {
-        PARSER.declareDouble(optionalConstructorArg(), OFFSET);
-    }
-
-    public static MeanSquaredLogarithmicErrorMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Double offset;
-
-    public MeanSquaredLogarithmicErrorMetric(@Nullable Double offset) {
-        this.offset = offset;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (offset != null) {
-            builder.field(OFFSET.getPreferredName(), offset);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        MeanSquaredLogarithmicErrorMetric that = (MeanSquaredLogarithmicErrorMetric) o;
-        return Objects.equals(this.offset, that.offset);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(offset);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
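
Reviewer sketch (not part of the deleted source): the MSLE formula from the javadoc above, as a standalone illustration with invented values:

```java
// msle = 1/n * Σ(log(y + offset) - log(y´ + offset))².
public class MsleSketch {
    static double msle(double[] actual, double[] predicted, double offset) {
        double sum = 0.0;
        for (int i = 0; i < actual.length; i++) {
            // the offset keeps the log argument positive, e.g. for zero-valued targets
            double diff = Math.log(actual[i] + offset) - Math.log(predicted[i] + offset);
            sum += diff * diff;
        }
        return sum / actual.length;
    }

    public static void main(String[] args) {
        System.out.println(msle(new double[] { 0.0, 10.0 }, new double[] { 1.0, 8.0 }, 1.0));
    }
}
```
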
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
deleted file mode 100644
index 496a3d55c0e51..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Calculates R-Squared between two known numerical fields.
- *
- * equation: r_squared = 1 - SSres/SStot
- * such that,
- * SSres = Σ(y - y´)^2
- * SStot = Σ(y - y_mean)^2
- */
-public class RSquaredMetric implements EvaluationMetric {
-
-    public static final String NAME = "r_squared";
-
-    private static final ObjectParser<RSquaredMetric, Void> PARSER = new ObjectParser<>(NAME, true, RSquaredMetric::new);
-
-    public static RSquaredMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public RSquaredMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // create static hash code from name as there are currently no unique fields per class instance
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
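
Reviewer sketch (not part of the deleted source): the R² formula from the javadoc above, as a standalone illustration with invented values:

```java
// r_squared = 1 - SSres/SStot, where SSres = Σ(y - y´)² and SStot = Σ(y - y_mean)².
public class RSquaredSketch {
    static double rSquared(double[] actual, double[] predicted) {
        double mean = 0.0;
        for (double y : actual) mean += y;
        mean /= actual.length;
        double ssRes = 0.0, ssTot = 0.0;
        for (int i = 0; i < actual.length; i++) {
            ssRes += (actual[i] - predicted[i]) * (actual[i] - predicted[i]);
            ssTot += (actual[i] - mean) * (actual[i] - mean);
        }
        return 1 - ssRes / ssTot; // 1.0 is a perfect fit; 0.0 is no better than predicting the mean
    }

    public static void main(String[] args) {
        System.out.println(rSquared(new double[] { 1.0, 2.0, 3.0 }, new double[] { 1.1, 1.9, 3.2 }));
    }
}
```
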
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java
deleted file mode 100644
index 622013957281e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Evaluation of regression results.
- */
-public class Regression implements Evaluation {
-
-    public static final String NAME = "regression";
-
-    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
-    private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field");
-    private static final ParseField METRICS = new ParseField("metrics");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<Regression, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new Regression((String) a[0], (String) a[1], (List<EvaluationMetric>) a[2])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), ACTUAL_FIELD);
-        PARSER.declareString(constructorArg(), PREDICTED_FIELD);
-        PARSER.declareNamedObjects(
-            optionalConstructorArg(),
-            (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c),
-            METRICS
-        );
-    }
-
-    public static Regression fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    /**
-     * The field containing the actual value
-     * The value of this field is assumed to be numeric
-     */
-    private final String actualField;
-
-    /**
-     * The field containing the predicted value
-     * The value of this field is assumed to be numeric
-     */
-    private final String predictedField;
-
-    /**
-     * The list of metrics to calculate
-     */
-    private final List<EvaluationMetric> metrics;
-
-    public Regression(String actualField, String predictedField) {
-        this(actualField, predictedField, (List<EvaluationMetric>) null);
-    }
-
-    public Regression(String actualField, String predictedField, EvaluationMetric... metrics) {
-        this(actualField, predictedField, Arrays.asList(metrics));
-    }
-
-    public Regression(String actualField, String predictedField, @Nullable List<EvaluationMetric> metrics) {
-        this.actualField = Objects.requireNonNull(actualField);
-        this.predictedField = Objects.requireNonNull(predictedField);
-        if (metrics != null) {
-            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
-        }
-        this.metrics = metrics;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
-        builder.field(PREDICTED_FIELD.getPreferredName(), predictedField);
-
-        if (metrics != null) {
-            builder.startObject(METRICS.getPreferredName());
-            for (EvaluationMetric metric : metrics) {
-                builder.field(metric.getName(), metric);
-            }
-            builder.endObject();
-        }
-
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Regression that = (Regression) o;
-        return Objects.equals(that.actualField, this.actualField)
-            && Objects.equals(that.predictedField, this.predictedField)
-            && Objects.equals(that.metrics, this.metrics);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(actualField, predictedField, metrics);
-    }
-}
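
Reviewer sketch (not part of the deleted source): a hedged usage example of the removed Regression evaluation, compiling only against classes deleted in this same patch; the field names are invented. Note that the constructor sorts the metrics by name before serialization, so the rendered order is deterministic.

```java
import org.elasticsearch.client.ml.dataframe.evaluation.regression.HuberMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;

public class RegressionEvalSketch {
    public static void main(String[] args) {
        // Hypothetical actual/predicted field names.
        Regression evaluation = new Regression(
            "price_actual",
            "ml.price_prediction",
            new MeanSquaredErrorMetric(),
            new HuberMetric(1.0)
        );
        System.out.println(evaluation.getName()); // "regression"
    }
}
```
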
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java
deleted file mode 100644
index e6a0362e3c0ca..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.explain;
-
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Objects;
-import java.util.Set;
-
-public class FieldSelection implements ToXContentObject {
-
-    private static final ParseField NAME = new ParseField("name");
-    private static final ParseField MAPPING_TYPES = new ParseField("mapping_types");
-    private static final ParseField IS_INCLUDED = new ParseField("is_included");
-    private static final ParseField IS_REQUIRED = new ParseField("is_required");
-    private static final ParseField FEATURE_TYPE = new ParseField("feature_type");
-    private static final ParseField REASON = new ParseField("reason");
-
-    public enum FeatureType {
-        CATEGORICAL,
-        NUMERICAL;
-
-        public static FeatureType fromString(String value) {
-            return FeatureType.valueOf(value.toUpperCase(Locale.ROOT));
-        }
-
-        @Override
-        public String toString() {
-            return name().toLowerCase(Locale.ROOT);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<FieldSelection, Void> PARSER = new ConstructingObjectParser<>(
-        "field_selection",
-        true,
-        a -> new FieldSelection(
-            (String) a[0],
-            new HashSet<>((List<String>) a[1]),
-            (boolean) a[2],
-            (boolean) a[3],
-            (FeatureType) a[4],
-            (String) a[5]
-        )
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
-        PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), MAPPING_TYPES);
-        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_INCLUDED);
-        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_REQUIRED);
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FeatureType::fromString, FEATURE_TYPE);
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON);
-    }
-
-    private final String name;
-    private final Set<String> mappingTypes;
-    private final boolean isIncluded;
-    private final boolean isRequired;
-    private final FeatureType featureType;
-    private final String reason;
-
-    public static FieldSelection included(String name, Set<String> mappingTypes, boolean isRequired, FeatureType featureType) {
-        return new FieldSelection(name, mappingTypes, true, isRequired, featureType, null);
-    }
-
-    public static FieldSelection excluded(String name, Set<String> mappingTypes, String reason) {
-        return new FieldSelection(name, mappingTypes, false, false, null, reason);
-    }
-
-    FieldSelection(
-        String name,
-        Set<String> mappingTypes,
-        boolean isIncluded,
-        boolean isRequired,
-        @Nullable FeatureType featureType,
-        @Nullable String reason
-    ) {
-        this.name = Objects.requireNonNull(name);
-        this.mappingTypes = Collections.unmodifiableSet(mappingTypes);
-        this.isIncluded = isIncluded;
-        this.isRequired = isRequired;
-        this.featureType = featureType;
-        this.reason = reason;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(NAME.getPreferredName(), name);
-        builder.field(MAPPING_TYPES.getPreferredName(), mappingTypes);
-        builder.field(IS_INCLUDED.getPreferredName(), isIncluded);
-        builder.field(IS_REQUIRED.getPreferredName(), isRequired);
-        if (featureType != null) {
-            builder.field(FEATURE_TYPE.getPreferredName(), featureType);
-        }
-        if (reason != null) {
-            builder.field(REASON.getPreferredName(), reason);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        FieldSelection that = (FieldSelection) o;
-        return Objects.equals(name, that.name)
-            && Objects.equals(mappingTypes, that.mappingTypes)
-            && isIncluded == that.isIncluded
-            && isRequired == that.isRequired
-            && Objects.equals(featureType, that.featureType)
-            && Objects.equals(reason, that.reason);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(name, mappingTypes, isIncluded, isRequired, featureType, reason);
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public Set<String> getMappingTypes() {
-        return mappingTypes;
-    }
-
-    public boolean isIncluded() {
-        return isIncluded;
-    }
-
-    public boolean isRequired() {
-        return isRequired;
-    }
-
-    @Nullable
-    public FeatureType getFeatureType() {
-        return featureType;
-    }
-
-    @Nullable
-    public String getReason() {
-        return reason;
-    }
-}
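
Reviewer sketch (not part of the deleted source): the two factory methods above are the intended entry points for this class. A hedged usage example compiling against the deleted class; all values are invented.

```java
import java.util.Set;

import org.elasticsearch.client.ml.dataframe.explain.FieldSelection;

public class FieldSelectionSketch {
    public static void main(String[] args) {
        // Hypothetical field names, mapping types, and exclusion reason.
        FieldSelection numeric = FieldSelection.included("price", Set.of("float"), false, FieldSelection.FeatureType.NUMERICAL);
        FieldSelection skipped = FieldSelection.excluded("description", Set.of("text"), "unsupported mapping type");
        System.out.println(numeric.isIncluded() + " / " + skipped.getReason());
    }
}
```
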
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java
deleted file mode 100644
index 54525134853aa..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.explain;
-
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class MemoryEstimation implements ToXContentObject {
-
-    public static final ParseField EXPECTED_MEMORY_WITHOUT_DISK = new ParseField("expected_memory_without_disk");
-    public static final ParseField EXPECTED_MEMORY_WITH_DISK = new ParseField("expected_memory_with_disk");
-
-    public static final ConstructingObjectParser<MemoryEstimation, Void> PARSER = new ConstructingObjectParser<>(
-        "memory_estimation",
-        true,
-        a -> new MemoryEstimation((ByteSizeValue) a[0], (ByteSizeValue) a[1])
-    );
-
-    static {
-        PARSER.declareField(
-            optionalConstructorArg(),
-            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName()),
-            EXPECTED_MEMORY_WITHOUT_DISK,
-            ObjectParser.ValueType.VALUE
-        );
-        PARSER.declareField(
-            optionalConstructorArg(),
-            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITH_DISK.getPreferredName()),
-            EXPECTED_MEMORY_WITH_DISK,
-            ObjectParser.ValueType.VALUE
-        );
-    }
-
-    private final ByteSizeValue expectedMemoryWithoutDisk;
-    private final ByteSizeValue expectedMemoryWithDisk;
-
-    public MemoryEstimation(@Nullable ByteSizeValue expectedMemoryWithoutDisk, @Nullable ByteSizeValue expectedMemoryWithDisk) {
-        this.expectedMemoryWithoutDisk = expectedMemoryWithoutDisk;
-        this.expectedMemoryWithDisk = expectedMemoryWithDisk;
-    }
-
-    public ByteSizeValue getExpectedMemoryWithoutDisk() {
-        return expectedMemoryWithoutDisk;
-    }
-
-    public ByteSizeValue getExpectedMemoryWithDisk() {
-        return expectedMemoryWithDisk;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (expectedMemoryWithoutDisk != null) {
-            builder.field(EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName(), expectedMemoryWithoutDisk.getStringRep());
-        }
-        if (expectedMemoryWithDisk != null) {
-            builder.field(EXPECTED_MEMORY_WITH_DISK.getPreferredName(), expectedMemoryWithDisk.getStringRep());
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        MemoryEstimation that = (MemoryEstimation) other;
-        return Objects.equals(expectedMemoryWithoutDisk, that.expectedMemoryWithoutDisk)
-            && Objects.equals(expectedMemoryWithDisk, that.expectedMemoryWithDisk);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(expectedMemoryWithoutDisk, expectedMemoryWithDisk);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java
deleted file mode 100644
index dcd21d6f6b3e1..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats;
-
-import org.elasticsearch.xcontent.ToXContentObject;
-
-/**
- * Statistics for the data frame analysis
- */
-public interface AnalysisStats extends ToXContentObject {
-
-    String getName();
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java
deleted file mode 100644
index 4da0981fa87d3..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats;
-
-import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStats;
-import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStats;
-import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStats;
-import org.elasticsearch.plugins.spi.NamedXContentProvider;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
-
-import java.util.Arrays;
-import java.util.List;
-
-public class AnalysisStatsNamedXContentProvider implements NamedXContentProvider {
-
-    @Override
-    public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
-        return Arrays.asList(
-            new NamedXContentRegistry.Entry(
-                AnalysisStats.class,
-                ClassificationStats.NAME,
-                (p, c) -> ClassificationStats.PARSER.apply(p, null)
-            ),
-            new NamedXContentRegistry.Entry(
-                AnalysisStats.class,
-                OutlierDetectionStats.NAME,
-                (p, c) -> OutlierDetectionStats.PARSER.apply(p, null)
-            ),
-            new NamedXContentRegistry.Entry(AnalysisStats.class, RegressionStats.NAME, (p, c) -> RegressionStats.PARSER.apply(p, null))
-        );
-    }
-}
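
Reviewer sketch (not part of the deleted source): the provider above wires a type name to a parser for each AnalysisStats implementation. The essence of that dispatch, reduced to a plain Map instead of the real NamedXContentRegistry; all names and parser bodies here are illustrative placeholders.

```java
import java.util.Map;
import java.util.function.Function;

public class NamedDispatchSketch {
    interface Stats { String getName(); }

    // Placeholder parsers: each maps a raw document to a Stats instance by name.
    static final Map<String, Function<String, Stats>> PARSERS = Map.of(
        "classification_stats", json -> () -> "classification_stats",
        "outlier_detection_stats", json -> () -> "outlier_detection_stats",
        "regression_stats", json -> () -> "regression_stats"
    );

    public static void main(String[] args) {
        Stats stats = PARSERS.get("regression_stats").apply("{}");
        System.out.println(stats.getName());
    }
}
```
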
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java
deleted file mode 100644
index e8367ae13c95e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats.classification;
-
-import org.elasticsearch.client.common.TimeUtil;
-import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.time.Instant;
-import java.util.Objects;
-
-public class ClassificationStats implements AnalysisStats {
-
-    public static final ParseField NAME = new ParseField("classification_stats");
-
-    public static final ParseField TIMESTAMP = new ParseField("timestamp");
-    public static final ParseField ITERATION = new ParseField("iteration");
-    public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters");
-    public static final ParseField TIMING_STATS = new ParseField("timing_stats");
-    public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss");
-
-    public static final ConstructingObjectParser<ClassificationStats, Void> PARSER = new ConstructingObjectParser<>(
-        NAME.getPreferredName(),
-        true,
-        a -> new ClassificationStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4])
-    );
-
-    static {
-        PARSER.declareField(
-            ConstructingObjectParser.constructorArg(),
-            p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()),
-            TIMESTAMP,
-            ObjectParser.ValueType.VALUE
-        );
-        PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION);
-        PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS);
-        PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS);
-        PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS);
-    }
-
-    private final Instant timestamp;
-    private final Integer iteration;
-    private final Hyperparameters hyperparameters;
-    private final TimingStats timingStats;
-    private final ValidationLoss validationLoss;
-
-    public ClassificationStats(
-        Instant timestamp,
-        Integer iteration,
-        Hyperparameters hyperparameters,
-        TimingStats timingStats,
-        ValidationLoss validationLoss
-    ) {
-        this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli());
-        this.iteration = iteration;
-        this.hyperparameters = Objects.requireNonNull(hyperparameters);
-        this.timingStats = Objects.requireNonNull(timingStats);
-        this.validationLoss = Objects.requireNonNull(validationLoss);
-    }
-
-    public Instant getTimestamp() {
-        return timestamp;
-    }
-
-    public Integer getIteration() {
-        return iteration;
-    }
-
-    public Hyperparameters getHyperparameters() {
-        return hyperparameters;
-    }
-
-    public TimingStats getTimingStats() {
-        return timingStats;
-    }
-
-    public ValidationLoss getValidationLoss() {
-        return validationLoss;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli());
-        if (iteration != null) {
-            builder.field(ITERATION.getPreferredName(), iteration);
-        }
-        builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters);
-        builder.field(TIMING_STATS.getPreferredName(), timingStats);
-        builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        ClassificationStats that = (ClassificationStats) o;
-        return Objects.equals(timestamp, that.timestamp)
-            && Objects.equals(iteration, that.iteration)
-            && Objects.equals(hyperparameters, that.hyperparameters)
-            && Objects.equals(timingStats, that.timingStats)
-            && Objects.equals(validationLoss, that.validationLoss);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss);
-    }
-
-    @Override
-    public String getName() {
-        return NAME.getPreferredName();
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java
deleted file mode 100644
index c136928aeb76f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats.classification;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class Hyperparameters implements ToXContentObject {
-
-    public static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective");
-    public static final ParseField ALPHA = new ParseField("alpha");
-    public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor");
-    public static final ParseField ETA = new ParseField("eta");
-    public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree");
-    public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction");
-    public static final ParseField GAMMA = new ParseField("gamma");
-    public static final ParseField LAMBDA = new ParseField("lambda");
-    public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree");
-    public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField(
-        "max_optimization_rounds_per_hyperparameter"
-    );
-    public static final ParseField MAX_TREES = new ParseField("max_trees");
-    public static final ParseField NUM_FOLDS = new ParseField("num_folds");
-    public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature");
-    public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit");
-    public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance");
-
-    public static ConstructingObjectParser<Hyperparameters, Void> PARSER = new ConstructingObjectParser<>(
-        "classification_hyperparameters",
-        true,
-        a -> new Hyperparameters(
-            (String) a[0],
-            (Double) a[1],
-            (Double) a[2],
-            (Double) a[3],
-            (Double) a[4],
-            (Double) a[5],
-            (Double) a[6],
-            (Double) a[7],
-            (Integer) a[8],
-            (Integer) a[9],
-            (Integer) a[10],
-            (Integer) a[11],
-            (Integer) a[12],
-            (Double) a[13],
-            (Double) a[14]
-        )
-    );
-
-    static {
-        PARSER.declareString(optionalConstructorArg(), CLASS_ASSIGNMENT_OBJECTIVE);
-        PARSER.declareDouble(optionalConstructorArg(), ALPHA);
-        PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR);
-        PARSER.declareDouble(optionalConstructorArg(), ETA);
-        PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE);
-        PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION);
-        PARSER.declareDouble(optionalConstructorArg(), GAMMA);
-        PARSER.declareDouble(optionalConstructorArg(), LAMBDA);
-        PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE);
-        PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER);
-        PARSER.declareInt(optionalConstructorArg(), MAX_TREES);
-        PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS);
-        PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE);
-        PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT);
-        PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE);
-    }
-
-    private final String classAssignmentObjective;
-    private final Double alpha;
-    private final Double downsampleFactor;
-    private final Double eta;
-    private final Double etaGrowthRatePerTree;
-    private final Double featureBagFraction;
-    private final Double gamma;
-    private final Double lambda;
-    private final Integer maxAttemptsToAddTree;
-    private final Integer maxOptimizationRoundsPerHyperparameter;
-    private final Integer maxTrees;
-    private final Integer numFolds;
-    private final Integer numSplitsPerFeature;
-    private final Double softTreeDepthLimit;
-    private final Double softTreeDepthTolerance;
-
-    public Hyperparameters(
-        String classAssignmentObjective,
-        Double alpha,
-        Double downsampleFactor,
-        Double eta,
-        Double etaGrowthRatePerTree,
-        Double featureBagFraction,
-        Double gamma,
-        Double lambda,
-        Integer maxAttemptsToAddTree,
-        Integer maxOptimizationRoundsPerHyperparameter,
-        Integer maxTrees,
-        Integer numFolds,
-        Integer numSplitsPerFeature,
-        Double softTreeDepthLimit,
-        Double softTreeDepthTolerance
-    ) {
-        this.classAssignmentObjective = classAssignmentObjective;
-        this.alpha = alpha;
-        this.downsampleFactor = downsampleFactor;
-        this.eta = eta;
-        this.etaGrowthRatePerTree = etaGrowthRatePerTree;
-        this.featureBagFraction = featureBagFraction;
-        this.gamma = gamma;
-        this.lambda = lambda;
-        this.maxAttemptsToAddTree = maxAttemptsToAddTree;
-        this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter;
-        this.maxTrees = maxTrees;
-        this.numFolds = numFolds;
-        this.numSplitsPerFeature = numSplitsPerFeature;
-        this.softTreeDepthLimit = softTreeDepthLimit;
-        this.softTreeDepthTolerance = softTreeDepthTolerance;
-    }
-
-    public String getClassAssignmentObjective() {
-        return classAssignmentObjective;
-    }
-
-    public Double getAlpha() {
-        return alpha;
-    }
-
-    public Double getDownsampleFactor() {
-        return downsampleFactor;
-    }
-
-    public Double getEta() {
-        return eta;
-    }
-
-    public Double getEtaGrowthRatePerTree() {
-        return etaGrowthRatePerTree;
-    }
-
-    public Double getFeatureBagFraction() {
-        return featureBagFraction;
-    }
-
-    public Double getGamma() {
-        return gamma;
-    }
-
-    public Double getLambda() {
-        return lambda;
-    }
-
-    public Integer getMaxAttemptsToAddTree() {
-        return maxAttemptsToAddTree;
-    }
-
-    public Integer getMaxOptimizationRoundsPerHyperparameter() {
-        return maxOptimizationRoundsPerHyperparameter;
-    }
-
-    public Integer getMaxTrees() {
-        return maxTrees;
-    }
-
-    public Integer getNumFolds() {
-        return numFolds;
-    }
-
-    public Integer getNumSplitsPerFeature() {
-        return numSplitsPerFeature;
-    }
-
-    public Double getSoftTreeDepthLimit() {
-        return softTreeDepthLimit;
-    }
-
-    public Double getSoftTreeDepthTolerance() {
-        return softTreeDepthTolerance;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (classAssignmentObjective != null) {
-            builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective);
-        }
-        if (alpha != null) {
-            builder.field(ALPHA.getPreferredName(), alpha);
-        }
-        if (downsampleFactor != null) {
-            builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor);
-        }
-        if (eta != null) {
-            builder.field(ETA.getPreferredName(), eta);
-        }
-        if (etaGrowthRatePerTree != null) {
-            builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree);
-        }
-        if (featureBagFraction != null) {
-            builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction);
-        }
-        if (gamma != null) {
-            builder.field(GAMMA.getPreferredName(), gamma);
-        }
-        if (lambda != null) {
-            builder.field(LAMBDA.getPreferredName(), lambda);
-        }
-        if (maxAttemptsToAddTree != null) {
-            builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree);
-        }
-        if (maxOptimizationRoundsPerHyperparameter != null) {
-            builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter);
-        }
-        if (maxTrees != null) {
-            builder.field(MAX_TREES.getPreferredName(), maxTrees);
-        }
-        if (numFolds != null) {
-            builder.field(NUM_FOLDS.getPreferredName(), numFolds);
-        }
-        if (numSplitsPerFeature != null) {
-            builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature);
-        }
-        if (softTreeDepthLimit != null) {
-            builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit);
-        }
-        if (softTreeDepthTolerance != null) {
-            builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        Hyperparameters that = (Hyperparameters) o;
-        return Objects.equals(classAssignmentObjective, that.classAssignmentObjective)
-            && Objects.equals(alpha, that.alpha)
-            && Objects.equals(downsampleFactor, that.downsampleFactor)
-            && Objects.equals(eta, that.eta)
-            && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree)
-            && Objects.equals(featureBagFraction, that.featureBagFraction)
-            && Objects.equals(gamma, that.gamma)
-            && Objects.equals(lambda, that.lambda)
-            && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree)
-            && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter)
-            && Objects.equals(maxTrees, that.maxTrees)
-            && Objects.equals(numFolds, that.numFolds)
-            && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature)
-            && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit)
-            && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(
-            classAssignmentObjective,
-            alpha,
-            downsampleFactor,
-            eta,
-            etaGrowthRatePerTree,
-            featureBagFraction,
-            gamma,
-            lambda,
-            maxAttemptsToAddTree,
-            maxOptimizationRoundsPerHyperparameter,
-            maxTrees,
-            numFolds,
-            numSplitsPerFeature,
-            softTreeDepthLimit,
-            softTreeDepthTolerance
-        );
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java
deleted file mode 100644
index 9afeeeeb3a4f8..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats.classification;
-
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-public class TimingStats implements ToXContentObject {
-
-    public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time");
-    public static final ParseField ITERATION_TIME = new ParseField("iteration_time");
-
-    public static ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>(
-        "classification_timing_stats",
-        true,
-        a -> new TimingStats(
-            a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]),
-            a[1] == null ? null : TimeValue.timeValueMillis((long) a[1])
-        )
-    );
-
-    static {
-        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME);
-        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME);
-    }
-
-    private final TimeValue elapsedTime;
-    private final TimeValue iterationTime;
-
-    public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) {
-        this.elapsedTime = elapsedTime;
-        this.iterationTime = iterationTime;
-    }
-
-    public TimeValue getElapsedTime() {
-        return elapsedTime;
-    }
-
-    public TimeValue getIterationTime() {
-        return iterationTime;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (elapsedTime != null) {
-            builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime);
-        }
-        if (iterationTime != null) {
-            builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        TimingStats that = (TimingStats) o;
-        return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(elapsedTime, iterationTime);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java
deleted file mode 100644
index ca781c8205300..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats.classification;
-
-import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-public class ValidationLoss implements ToXContentObject {
-
-    public static final ParseField LOSS_TYPE = new ParseField("loss_type");
-    public static final ParseField FOLD_VALUES = new ParseField("fold_values");
-
-    @SuppressWarnings("unchecked")
-    public static ConstructingObjectParser<ValidationLoss, Void> PARSER = new ConstructingObjectParser<>(
-        "classification_validation_loss",
-        true,
-        a -> new ValidationLoss((String) a[0], (List<FoldValues>) a[1])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE);
-        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES);
-    }
-
-    private final String lossType;
-    private final List<FoldValues> foldValues;
-
-    public ValidationLoss(String lossType, List<FoldValues> values) {
-        this.lossType = lossType;
-        this.foldValues = values;
-    }
-
-    public String getLossType() {
-        return lossType;
-    }
-
-    public List<FoldValues> getFoldValues() {
-        return foldValues;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (lossType != null) {
-            builder.field(LOSS_TYPE.getPreferredName(), lossType);
-        }
-        if (foldValues != null) {
-            builder.field(FOLD_VALUES.getPreferredName(), foldValues);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        ValidationLoss that = (ValidationLoss) o;
-        return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(lossType, foldValues);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java
deleted file mode 100644
index 82c4fccb09c8f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml.dataframe.stats.common;
-
-import org.elasticsearch.common.inject.internal.ToStringBuilder;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class DataCounts implements ToXContentObject {
-
-    public static final String TYPE_VALUE = "analytics_data_counts";
-
-    public static final ParseField TRAINING_DOCS_COUNT = new ParseField("training_docs_count");
-    public static final ParseField TEST_DOCS_COUNT = new ParseField("test_docs_count");
-    public static final ParseField SKIPPED_DOCS_COUNT = new ParseField("skipped_docs_count");
-
-    public static final ConstructingObjectParser<DataCounts, Void> PARSER = new ConstructingObjectParser<>(TYPE_VALUE, true, a -> {
-        Long trainingDocsCount = (Long) a[0];
-        Long testDocsCount = (Long) a[1];
-        Long skippedDocsCount = (Long) a[2];
-        return new DataCounts(getOrDefault(trainingDocsCount, 0L), getOrDefault(testDocsCount, 0L), getOrDefault(skippedDocsCount, 0L));
-    });
-
-    static {
-        PARSER.declareLong(optionalConstructorArg(), TRAINING_DOCS_COUNT);
-        PARSER.declareLong(optionalConstructorArg(), TEST_DOCS_COUNT);
-        PARSER.declareLong(optionalConstructorArg(), SKIPPED_DOCS_COUNT);
-    }
-
-    private final long trainingDocsCount;
-    private final long testDocsCount;
-    private final long skippedDocsCount;
-
-    public DataCounts(long trainingDocsCount, long testDocsCount, long skippedDocsCount) {
-        this.trainingDocsCount = trainingDocsCount;
-        this.testDocsCount = testDocsCount;
-        this.skippedDocsCount = skippedDocsCount;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount);
-        builder.field(TEST_DOCS_COUNT.getPreferredName(), testDocsCount);
-        builder.field(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        DataCounts that = (DataCounts) o;
-        return trainingDocsCount == that.trainingDocsCount
-            && testDocsCount == that.testDocsCount
-            && skippedDocsCount == that.skippedDocsCount;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(trainingDocsCount, testDocsCount, skippedDocsCount);
-    }
-
-    @Override
-    public String toString() {
-        return new ToStringBuilder(getClass()).add(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount)
-            .add(TEST_DOCS_COUNT.getPreferredName(), testDocsCount)
-            .add(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount)
-            .toString();
-    }
-
-    public long getTrainingDocsCount() {
-        return trainingDocsCount;
-    }
-
-    public long getTestDocsCount() {
-        return testDocsCount;
-    }
-
-    public long getSkippedDocsCount() {
-        return skippedDocsCount;
-    }
-
-    private static <T> T getOrDefault(@Nullable T value, T defaultValue) {
-        return value != null ? value : defaultValue;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java
deleted file mode 100644
index d9f9fbc74fe70..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.stats.common;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-public class FoldValues implements ToXContentObject {
-
-    public static final ParseField FOLD = new ParseField("fold");
-    public static final ParseField VALUES = new ParseField("values");
-
-    @SuppressWarnings("unchecked")
-    public static ConstructingObjectParser<FoldValues, Void> PARSER = new ConstructingObjectParser<>(
-        "fold_values",
-        true,
-        a -> new FoldValues((int) a[0], (List<Double>) a[1])
-    );
-
-    static {
-        PARSER.declareInt(ConstructingObjectParser.constructorArg(), FOLD);
-        PARSER.declareDoubleArray(ConstructingObjectParser.constructorArg(), VALUES);
-    }
-
-    private final int fold;
-    private final double[] values;
-
-    private FoldValues(int fold, List<Double> values) {
-        this(fold, values.stream().mapToDouble(Double::doubleValue).toArray());
-    }
-
-    public FoldValues(int fold, double[] values) {
-        this.fold = fold;
-        this.values = values;
-    }
-
-    public int getFold() {
-        return fold;
-    }
-
-    public double[] getValues() {
-        return values;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(FOLD.getPreferredName(), fold);
-        builder.array(VALUES.getPreferredName(), values);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == this) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        FoldValues other = (FoldValues) o;
-        return fold == other.fold && Arrays.equals(values, other.values);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(fold, Arrays.hashCode(values));
-    }
-}
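
Reviewer sketch (not part of the deleted source): the private FoldValues constructor above converts the parsed boxed list into a primitive array. That conversion in isolation, with invented values:

```java
import java.util.List;

public class UnboxSketch {
    public static void main(String[] args) {
        List<Double> boxed = List.of(0.1, 0.2, 0.3);
        // Same stream pipeline FoldValues uses to unbox List<Double> into double[].
        double[] primitive = boxed.stream().mapToDouble(Double::doubleValue).toArray();
        System.out.println(primitive.length); // 3
    }
}
```
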
- */ -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Locale; -import java.util.Objects; - -public class MemoryUsage implements ToXContentObject { - - static final ParseField TIMESTAMP = new ParseField("timestamp"); - static final ParseField PEAK_USAGE_BYTES = new ParseField("peak_usage_bytes"); - static final ParseField STATUS = new ParseField("status"); - static final ParseField MEMORY_REESTIMATE_BYTES = new ParseField("memory_reestimate_bytes"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analytics_memory_usage", - true, - a -> new MemoryUsage((Instant) a[0], (long) a[1], (Status) a[2], (Long) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PEAK_USAGE_BYTES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Status::fromString, STATUS); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MEMORY_REESTIMATE_BYTES); - } - - @Nullable - private final Instant timestamp; - private final long peakUsageBytes; - private final Status status; - private final Long memoryReestimateBytes; - - public MemoryUsage(@Nullable Instant timestamp, long peakUsageBytes, Status status, @Nullable Long memoryReestimateBytes) { - this.timestamp = timestamp == null ? 
null : Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.peakUsageBytes = peakUsageBytes; - this.status = status; - this.memoryReestimateBytes = memoryReestimateBytes; - } - - @Nullable - public Instant getTimestamp() { - return timestamp; - } - - public long getPeakUsageBytes() { - return peakUsageBytes; - } - - public Status getStatus() { - return status; - } - - public Long getMemoryReestimateBytes() { - return memoryReestimateBytes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - } - builder.field(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes); - builder.field(STATUS.getPreferredName(), status); - if (memoryReestimateBytes != null) { - builder.field(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - MemoryUsage other = (MemoryUsage) o; - return Objects.equals(timestamp, other.timestamp) - && peakUsageBytes == other.peakUsageBytes - && Objects.equals(status, other.status) - && Objects.equals(memoryReestimateBytes, other.memoryReestimateBytes); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, peakUsageBytes, status, memoryReestimateBytes); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(TIMESTAMP.getPreferredName(), timestamp == null ? null : timestamp.getEpochSecond()) - .add(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes) - .add(STATUS.getPreferredName(), status) - .add(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes) - .toString(); - } - - public enum Status { - OK, - HARD_LIMIT; - - public static Status fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java deleted file mode 100644 index 8481aecf808a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class OutlierDetectionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("outlier_detection_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField PARAMETERS = new ParseField("parameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser<OutlierDetectionStats, Void> PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new OutlierDetectionStats((Instant) a[0], (Parameters) a[1], (TimingStats) a[2]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Parameters.PARSER, PARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - } - - private final Instant timestamp; - private final Parameters parameters; - private final TimingStats timingStats; - - public OutlierDetectionStats(Instant timestamp, Parameters parameters, TimingStats timingStats) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.parameters = Objects.requireNonNull(parameters); - this.timingStats = Objects.requireNonNull(timingStats); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Parameters getParameters() { - return parameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - builder.field(PARAMETERS.getPreferredName(), parameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OutlierDetectionStats that = (OutlierDetectionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(parameters, that.parameters) - && Objects.equals(timingStats, that.timingStats); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, parameters, timingStats); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java deleted file mode 100644 index aef6ad0833d42..0000000000000 ---
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Parameters implements ToXContentObject { - - public static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - public static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - public static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - public static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - public static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser<Parameters, Void> PARSER = new ConstructingObjectParser<>( - "outlier_detection_parameters", - true, - a -> new Parameters((Integer) a[0], (String) a[1], (Boolean) a[2], (Double) a[3], (Double) a[4], (Boolean) a[5]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), N_NEIGHBORS); - PARSER.declareString(optionalConstructorArg(), METHOD); - PARSER.declareBoolean(optionalConstructorArg(), COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareDouble(optionalConstructorArg(), OUTLIER_FRACTION); - PARSER.declareBoolean(optionalConstructorArg(), STANDARDIZATION_ENABLED); - } - - private final Integer nNeighbors; - private final String method; - private final Boolean computeFeatureInfluence; - private final Double featureInfluenceThreshold; - private final Double outlierFraction; - private final Boolean standardizationEnabled; - - public Parameters( - Integer nNeighbors, - String method, - Boolean computeFeatureInfluence, - Double featureInfluenceThreshold, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.computeFeatureInfluence = computeFeatureInfluence; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - public Integer getnNeighbors() { - return nNeighbors; - } - - public String getMethod() { - return method; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Parameters that = (Parameters) o; - return Objects.equals(nNeighbors, that.nNeighbors) - && Objects.equals(method, that.method) - && Objects.equals(computeFeatureInfluence, that.computeFeatureInfluence) - && Objects.equals(featureInfluenceThreshold, that.featureInfluenceThreshold) - && Objects.equals(outlierFraction, that.outlierFraction) - && Objects.equals(standardizationEnabled, that.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - computeFeatureInfluence, - featureInfluenceThreshold, - outlierFraction, - standardizationEnabled - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java deleted file mode 100644 index 72d96fa4d71cf..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - - public static ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>( - "outlier_detection_timing_stats", - true, - a -> new TimingStats(a[0] == null ?
null : TimeValue.timeValueMillis((long) a[0])) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - } - - private final TimeValue elapsedTime; - - public TimingStats(TimeValue elapsedTime) { - this.elapsedTime = elapsedTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java deleted file mode 100644 index bd89928f035c8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Hyperparameters implements ToXContentObject { - - public static final ParseField ALPHA = new ParseField("alpha"); - public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - public static final ParseField ETA = new ParseField("eta"); - public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - public static final ParseField GAMMA = new ParseField("gamma"); - public static final ParseField LAMBDA = new ParseField("lambda"); - public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree"); - public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField( - "max_optimization_rounds_per_hyperparameter" - ); - public static final ParseField MAX_TREES = new ParseField("max_trees"); - public static final ParseField NUM_FOLDS = new ParseField("num_folds"); - public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature"); - public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - - public static ConstructingObjectParser<Hyperparameters, Void> PARSER = new ConstructingObjectParser<>( - "regression_hyperparameters", - true, - a -> new Hyperparameters( - (Double) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Double) a[4], - (Double) a[5], - (Double) a[6], - (Integer) a[7], - (Integer) a[8], - (Integer) a[9], - (Integer) a[10], - (Integer) a[11], - (Double) a[12], - (Double) a[13] - ) - ); - - static { - PARSER.declareDouble(optionalConstructorArg(), ALPHA); - PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareDouble(optionalConstructorArg(), ETA); - PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareDouble(optionalConstructorArg(), GAMMA); - PARSER.declareDouble(optionalConstructorArg(), LAMBDA); - PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE); - PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareInt(optionalConstructorArg(), MAX_TREES); - PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS); - PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - } - - private final Double alpha; - private final Double downsampleFactor; - private final Double eta; - private final Double etaGrowthRatePerTree; - private final Double featureBagFraction; - private final Double gamma; - private final Double lambda; - private final Integer maxAttemptsToAddTree; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Integer maxTrees; - private final Integer numFolds; -
private final Integer numSplitsPerFeature; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - - public Hyperparameters( - Double alpha, - Double downsampleFactor, - Double eta, - Double etaGrowthRatePerTree, - Double featureBagFraction, - Double gamma, - Double lambda, - Integer maxAttemptsToAddTree, - Integer maxOptimizationRoundsPerHyperparameter, - Integer maxTrees, - Integer numFolds, - Integer numSplitsPerFeature, - Double softTreeDepthLimit, - Double softTreeDepthTolerance - ) { - this.alpha = alpha; - this.downsampleFactor = downsampleFactor; - this.eta = eta; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.featureBagFraction = featureBagFraction; - this.gamma = gamma; - this.lambda = lambda; - this.maxAttemptsToAddTree = maxAttemptsToAddTree; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.maxTrees = maxTrees; - this.numFolds = numFolds; - this.numSplitsPerFeature = numSplitsPerFeature; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - } - - public Double getAlpha() { - return alpha; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Double getEta() { - return eta; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Double getGamma() { - return gamma; - } - - public Double getLambda() { - return lambda; - } - - public Integer getMaxAttemptsToAddTree() { - return maxAttemptsToAddTree; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Integer getNumFolds() { - return numFolds; - } - - public Integer getNumSplitsPerFeature() { - return numSplitsPerFeature; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (maxAttemptsToAddTree != null) { - builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (numFolds != null) { - builder.field(NUM_FOLDS.getPreferredName(), numFolds); - } - if (numSplitsPerFeature != null) { - builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature); - } - if (softTreeDepthLimit != null) { - 
builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Hyperparameters that = (Hyperparameters) o; - return Objects.equals(alpha, that.alpha) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(eta, that.eta) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(gamma, that.gamma) - && Objects.equals(lambda, that.lambda) - && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(numFolds, that.numFolds) - && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance); - } - - @Override - public int hashCode() { - return Objects.hash( - alpha, - downsampleFactor, - eta, - etaGrowthRatePerTree, - featureBagFraction, - gamma, - lambda, - maxAttemptsToAddTree, - maxOptimizationRoundsPerHyperparameter, - maxTrees, - numFolds, - numSplitsPerFeature, - softTreeDepthLimit, - softTreeDepthTolerance - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java deleted file mode 100644 index 8507a2c88f3a9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class RegressionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("regression_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField ITERATION = new ParseField("iteration"); - public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss"); - - public static final ConstructingObjectParser<RegressionStats, Void> PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new RegressionStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS); - } - - private final Instant timestamp; - private final Integer iteration; - private final Hyperparameters hyperparameters; - private final TimingStats timingStats; - private final ValidationLoss validationLoss; - - public RegressionStats( - Instant timestamp, - Integer iteration, - Hyperparameters hyperparameters, - TimingStats timingStats, - ValidationLoss validationLoss - ) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.iteration = iteration; - this.hyperparameters = Objects.requireNonNull(hyperparameters); - this.timingStats = Objects.requireNonNull(timingStats); - this.validationLoss = Objects.requireNonNull(validationLoss); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Integer getIteration() { - return iteration; - } - - public Hyperparameters getHyperparameters() { - return hyperparameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - public ValidationLoss getValidationLoss() { - return validationLoss; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - if (iteration != null) { - builder.field(ITERATION.getPreferredName(), iteration); - } - builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss); - builder.endObject(); -
return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionStats that = (RegressionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(iteration, that.iteration) - && Objects.equals(hyperparameters, that.hyperparameters) - && Objects.equals(timingStats, that.timingStats) - && Objects.equals(validationLoss, that.validationLoss); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java deleted file mode 100644 index 7a06a2aa3b4d5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - public static final ParseField ITERATION_TIME = new ParseField("iteration_time"); - - public static ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>( - "regression_timing_stats", - true, - a -> new TimingStats( - a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]), - a[1] == null ?
null : TimeValue.timeValueMillis((long) a[1]) - ) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME); - } - - private final TimeValue elapsedTime; - private final TimeValue iterationTime; - - public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) { - this.elapsedTime = elapsedTime; - this.iterationTime = iterationTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - public TimeValue getIterationTime() { - return iterationTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - if (iterationTime != null) { - builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime, iterationTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java deleted file mode 100644 index 2fabaad16ffc5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ValidationLoss implements ToXContentObject { - - public static final ParseField LOSS_TYPE = new ParseField("loss_type"); - public static final ParseField FOLD_VALUES = new ParseField("fold_values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser<ValidationLoss, Void> PARSER = new ConstructingObjectParser<>( - "regression_validation_loss", - true, - a -> new ValidationLoss((String) a[0], (List<FoldValues>) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE); - PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES); - } - - private final String lossType; - private final List<FoldValues> foldValues; - - public ValidationLoss(String lossType, List<FoldValues> values) { - this.lossType = lossType; - this.foldValues = values; - } - - public String getLossType() { - return lossType; - } - - public List<FoldValues> getFoldValues() { - return foldValues; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (lossType != null) { - builder.field(LOSS_TYPE.getPreferredName(), lossType); - } - if (foldValues != null) { - builder.field(FOLD_VALUES.getPreferredName(), foldValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ValidationLoss that = (ValidationLoss) o; - return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues); - } - - @Override - public int hashCode() { - return Objects.hash(lossType, foldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java deleted file mode 100644 index af06d177d9bf9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.zip.GZIPInputStream; -import java.util.zip.GZIPOutputStream; - -/** - * Collection of helper methods. Similar to CompressedXContent, but this utilizes GZIP. - */ -public final class InferenceToXContentCompressor { - private static final int BUFFER_SIZE = 4096; - private static final long MAX_INFLATED_BYTES = 1_000_000_000; // 1 gb maximum - - private InferenceToXContentCompressor() {} - - public static <T extends ToXContentObject> String deflate(T objectToCompress) throws IOException { - BytesReference reference = XContentHelper.toXContent(objectToCompress, XContentType.JSON, false); - return deflate(reference); - } - - public static <T> T inflate( - String compressedString, - CheckedFunction<XContentParser, T, IOException> parserFunction, - NamedXContentRegistry xContentRegistry - ) throws IOException { - try ( - XContentParser parser = XContentHelper.createParser( - xContentRegistry, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - inflate(compressedString, MAX_INFLATED_BYTES), - XContentType.JSON - ) - ) { - return parserFunction.apply(parser); - } - } - - static BytesReference inflate(String compressedString, long streamSize) throws IOException { - byte[] compressedBytes = Base64.getDecoder().decode(compressedString.getBytes(StandardCharsets.UTF_8)); - InputStream gzipStream = new GZIPInputStream(new BytesArray(compressedBytes).streamInput(), BUFFER_SIZE); - InputStream inflateStream = new SimpleBoundedInputStream(gzipStream, streamSize); - return Streams.readFully(inflateStream); - } - - private static String deflate(BytesReference reference) throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); - try (OutputStream compressedOutput = new GZIPOutputStream(out, BUFFER_SIZE)) { - reference.writeTo(compressedOutput); - } - return new String(Base64.getEncoder().encode(BytesReference.toBytes(out.bytes())), StandardCharsets.UTF_8); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java deleted file mode 100644 index 271b882f697e3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.preprocessing.CustomWordEmbedding; -import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; -import org.elasticsearch.client.ml.inference.preprocessing.Multi; -import org.elasticsearch.client.ml.inference.preprocessing.NGram; -import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; -import org.elasticsearch.client.ml.inference.trainedmodel.ClassificationConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.IndexLocation; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Exponent; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.LogisticRegression; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum; -import org.elasticsearch.client.ml.inference.trainedmodel.langident.LangIdentNeuralNetwork; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.ArrayList; -import java.util.List; - -public class MlInferenceNamedXContentProvider implements NamedXContentProvider { - - @Override - public List<NamedXContentRegistry.Entry> getNamedXContentParsers() { - List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>(); - - // PreProcessing - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME), OneHotEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME), TargetMeanEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), FrequencyEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(CustomWordEmbedding.NAME), CustomWordEmbedding::fromXContent) - ); - namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(NGram.NAME), NGram::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(Multi.NAME), Multi::fromXContent)); - - // Model - namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Ensemble.NAME), Ensemble::fromXContent)); - namedXContent.add( - new NamedXContentRegistry.Entry( - TrainedModel.class, - new ParseField(LangIdentNeuralNetwork.NAME), - LangIdentNeuralNetwork::fromXContent - ) - ); - - // Inference Config - namedXContent.add( - new NamedXContentRegistry.Entry(InferenceConfig.class, ClassificationConfig.NAME,
ClassificationConfig::fromXContent) - ); - namedXContent.add(new NamedXContentRegistry.Entry(InferenceConfig.class, RegressionConfig.NAME, RegressionConfig::fromXContent)); - - // Aggregating output - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedMode.NAME), WeightedMode::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedSum.NAME), WeightedSum::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry( - OutputAggregator.class, - new ParseField(LogisticRegression.NAME), - LogisticRegression::fromXContent - ) - ); - namedXContent.add(new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(Exponent.NAME), Exponent::fromXContent)); - - // location - namedXContent.add( - new NamedXContentRegistry.Entry(TrainedModelLocation.class, new ParseField(IndexLocation.INDEX), IndexLocation::fromXContent) - ); - - return namedXContent; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java deleted file mode 100644 index 1a6eb8afdac24..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Simple interface for XContent Objects that are named. - * - * This affords more general handling when serializing and de-serializing this type of XContent when it is used in a NamedObjects - * parser. - */ -public interface NamedXContentObject extends ToXContentObject { - /** - * @return The name of the XContentObject that is to be serialized - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java deleted file mode 100644 index b0c4015e186a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; - -public final class NamedXContentObjectHelper { - - private NamedXContentObjectHelper() {} - - public static XContentBuilder writeNamedObjects( - XContentBuilder builder, - ToXContent.Params params, - boolean useExplicitOrder, - String namedObjectsName, - List<? extends NamedXContentObject> namedObjects - ) throws IOException { - if (useExplicitOrder) { - builder.startArray(namedObjectsName); - } else { - builder.startObject(namedObjectsName); - } - for (NamedXContentObject object : namedObjects) { - if (useExplicitOrder) { - builder.startObject(); - } - builder.field(object.getName(), object, params); - if (useExplicitOrder) { - builder.endObject(); - } - } - if (useExplicitOrder) { - builder.endArray(); - } else { - builder.endObject(); - } - return builder; - } - - public static XContentBuilder writeNamedObject( - XContentBuilder builder, - ToXContent.Params params, - String namedObjectName, - NamedXContentObject namedObject - ) throws IOException { - builder.startObject(namedObjectName); - builder.field(namedObject.getName(), namedObject, params); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java deleted file mode 100644 index f0c274d49592a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Objects; - -/** - * This is a pared down bounded input stream. - * Only read is specifically enforced. - */ -final class SimpleBoundedInputStream extends InputStream { - - private final InputStream in; - private final long maxBytes; - private long numBytes; - - SimpleBoundedInputStream(InputStream inputStream, long maxBytes) { - this.in = Objects.requireNonNull(inputStream, "inputStream"); - if (maxBytes < 0) { - throw new IllegalArgumentException("[maxBytes] must be greater than or equal to 0"); - } - this.maxBytes = maxBytes; - } - - /** - * A simple wrapper around the injected input stream that restricts the total number of bytes able to be read. - * @return The byte read. -1 on internal stream completion or when maxBytes is exceeded. - * @throws IOException on failure - */ - @Override - public int read() throws IOException { - // We have reached the maximum, signal stream completion.
- if (numBytes >= maxBytes) { - return -1; - } - numBytes++; - return in.read(); - } - - /** - * Delegates `close` to the wrapped InputStream - * @throws IOException on failure - */ - @Override - public void close() throws IOException { - in.close(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java deleted file mode 100644 index 8defbcfce2e83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java +++ /dev/null @@ -1,504 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.client.ml.inference.NamedXContentObjectHelper.writeNamedObject; - -public class TrainedModelConfig implements ToXContentObject { - - public static final String NAME = "trained_model_config"; - - public static final ParseField MODEL_ID = new ParseField("model_id"); - public static final ParseField MODEL_TYPE = new ParseField("model_type"); - public static final ParseField CREATED_BY = new ParseField("created_by"); - public static final ParseField VERSION = new ParseField("version"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField DEFINITION = new ParseField("definition"); - public static final ParseField COMPRESSED_DEFINITION = new ParseField("compressed_definition"); - public static final ParseField TAGS = new ParseField("tags"); - public static final ParseField METADATA = new ParseField("metadata"); - public static final ParseField INPUT = new ParseField("input"); - @Deprecated - public static final ParseField ESTIMATED_HEAP_MEMORY_USAGE_BYTES = new ParseField("estimated_heap_memory_usage_bytes"); - public static final ParseField MODEL_SIZE_BYTES = new ParseField("model_size_bytes", "estimated_heap_memory_usage_bytes"); - public static final ParseField ESTIMATED_OPERATIONS = new ParseField("estimated_operations"); - public static final ParseField LICENSE_LEVEL = new ParseField("license_level"); - public static final ParseField DEFAULT_FIELD_MAP = new ParseField("default_field_map"); - public static final ParseField INFERENCE_CONFIG = new 
ParseField("inference_config"); - public static final ParseField LOCATION = new ParseField("location"); - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, TrainedModelConfig.Builder::new); - static { - PARSER.declareString(TrainedModelConfig.Builder::setModelId, MODEL_ID); - PARSER.declareString(TrainedModelConfig.Builder::setModelType, MODEL_TYPE); - PARSER.declareString(TrainedModelConfig.Builder::setCreatedBy, CREATED_BY); - PARSER.declareString(TrainedModelConfig.Builder::setVersion, VERSION); - PARSER.declareString(TrainedModelConfig.Builder::setDescription, DESCRIPTION); - PARSER.declareField( - TrainedModelConfig.Builder::setCreateTime, - (p, c) -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(TrainedModelConfig.Builder::setDefinition, (p, c) -> TrainedModelDefinition.fromXContent(p), DEFINITION); - PARSER.declareString(TrainedModelConfig.Builder::setCompressedDefinition, COMPRESSED_DEFINITION); - PARSER.declareStringArray(TrainedModelConfig.Builder::setTags, TAGS); - PARSER.declareObject(TrainedModelConfig.Builder::setMetadata, (p, c) -> p.map(), METADATA); - PARSER.declareObject(TrainedModelConfig.Builder::setInput, (p, c) -> TrainedModelInput.fromXContent(p), INPUT); - PARSER.declareLong(TrainedModelConfig.Builder::setModelSize, MODEL_SIZE_BYTES); - PARSER.declareLong(TrainedModelConfig.Builder::setEstimatedOperations, ESTIMATED_OPERATIONS); - PARSER.declareString(TrainedModelConfig.Builder::setLicenseLevel, LICENSE_LEVEL); - PARSER.declareObject(TrainedModelConfig.Builder::setDefaultFieldMap, (p, c) -> p.mapStrings(), DEFAULT_FIELD_MAP); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setInferenceConfig, - (p, c, n) -> p.namedObject(InferenceConfig.class, n, null), - INFERENCE_CONFIG - ); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setLocation, - (p, c, n) -> p.namedObject(TrainedModelLocation.class, n, null), - LOCATION - ); - } - - public static TrainedModelConfig fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null).build(); - } - - private final String modelId; - private final TrainedModelType modelType; - private final String createdBy; - private final Version version; - private final String description; - private final Instant createTime; - private final TrainedModelDefinition definition; - private final String compressedDefinition; - private final List tags; - private final Map metadata; - private final TrainedModelInput input; - private final Long modelSize; - private final Long estimatedOperations; - private final String licenseLevel; - private final Map defaultFieldMap; - private final InferenceConfig inferenceConfig; - private final TrainedModelLocation location; - - TrainedModelConfig( - String modelId, - TrainedModelType modelType, - String createdBy, - Version version, - String description, - Instant createTime, - TrainedModelDefinition definition, - String compressedDefinition, - List tags, - Map metadata, - TrainedModelInput input, - Long modelSize, - Long estimatedOperations, - String licenseLevel, - Map defaultFieldMap, - InferenceConfig inferenceConfig, - TrainedModelLocation location - ) { - this.modelId = modelId; - this.modelType = modelType; - this.createdBy = createdBy; - this.version = version; - this.createTime = createTime == null ? 
null : Instant.ofEpochMilli(createTime.toEpochMilli()); - this.definition = definition; - this.compressedDefinition = compressedDefinition; - this.description = description; - this.tags = tags == null ? null : Collections.unmodifiableList(tags); - this.metadata = metadata == null ? null : Collections.unmodifiableMap(metadata); - this.input = input; - this.modelSize = modelSize; - this.estimatedOperations = estimatedOperations; - this.licenseLevel = licenseLevel; - this.defaultFieldMap = defaultFieldMap == null ? null : Collections.unmodifiableMap(defaultFieldMap); - this.inferenceConfig = inferenceConfig; - this.location = location; - } - - public String getModelId() { - return modelId; - } - - public TrainedModelType getModelType() { - return modelType; - } - - public String getCreatedBy() { - return createdBy; - } - - public Version getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public Instant getCreateTime() { - return createTime; - } - - public List getTags() { - return tags; - } - - public Map getMetadata() { - return metadata; - } - - public TrainedModelDefinition getDefinition() { - return definition; - } - - public String getCompressedDefinition() { - return compressedDefinition; - } - - @Nullable - public TrainedModelLocation getLocation() { - return location; - } - - public TrainedModelInput getInput() { - return input; - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSize()} instead - * @return the {@link ByteSizeValue} of the model size if available. - */ - @Deprecated - public ByteSizeValue getEstimatedHeapMemory() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSizeBytes()} instead - * @return the model size in bytes if available. - */ - @Deprecated - public Long getEstimatedHeapMemoryBytes() { - return modelSize; - } - - /** - * @return the {@link ByteSizeValue} of the model size if available. - */ - public ByteSizeValue getModelSize() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @return the model size in bytes if available. 
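A minimal usage sketch of the size accessors above (the parser variable and model id are hypothetical; getModelSize()/getModelSizeBytes() are the documented replacements for the deprecated getEstimatedHeapMemory*() pair):

    // Sketch: prefer the non-deprecated accessors when reading the model size.
    TrainedModelConfig config = TrainedModelConfig.fromXContent(parser); // parser positioned on a config object
    ByteSizeValue size = config.getModelSize();  // replaces getEstimatedHeapMemory()
    Long sizeBytes = config.getModelSizeBytes(); // replaces getEstimatedHeapMemoryBytes()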
- */ - public Long getModelSizeBytes() { - return modelSize; - } - - public String getLicenseLevel() { - return licenseLevel; - } - - public Map getDefaultFieldMap() { - return defaultFieldMap; - } - - public InferenceConfig getInferenceConfig() { - return inferenceConfig; - } - - public static Builder builder() { - return new Builder(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelId != null) { - builder.field(MODEL_ID.getPreferredName(), modelId); - } - if (modelType != null) { - builder.field(MODEL_TYPE.getPreferredName(), modelType.toString()); - } - if (createdBy != null) { - builder.field(CREATED_BY.getPreferredName(), createdBy); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version.toString()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (definition != null) { - builder.field(DEFINITION.getPreferredName(), definition); - } - if (tags != null) { - builder.field(TAGS.getPreferredName(), tags); - } - if (metadata != null) { - builder.field(METADATA.getPreferredName(), metadata); - } - if (input != null) { - builder.field(INPUT.getPreferredName(), input); - } - if (modelSize != null) { - builder.field(MODEL_SIZE_BYTES.getPreferredName(), modelSize); - } - if (estimatedOperations != null) { - builder.field(ESTIMATED_OPERATIONS.getPreferredName(), estimatedOperations); - } - if (compressedDefinition != null) { - builder.field(COMPRESSED_DEFINITION.getPreferredName(), compressedDefinition); - } - if (licenseLevel != null) { - builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel); - } - if (defaultFieldMap != null) { - builder.field(DEFAULT_FIELD_MAP.getPreferredName(), defaultFieldMap); - } - if (inferenceConfig != null) { - writeNamedObject(builder, params, INFERENCE_CONFIG.getPreferredName(), inferenceConfig); - } - if (location != null) { - writeNamedObject(builder, params, LOCATION.getPreferredName(), location); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelConfig that = (TrainedModelConfig) o; - return Objects.equals(modelId, that.modelId) - && Objects.equals(modelType, that.modelType) - && Objects.equals(createdBy, that.createdBy) - && Objects.equals(version, that.version) - && Objects.equals(description, that.description) - && Objects.equals(createTime, that.createTime) - && Objects.equals(definition, that.definition) - && Objects.equals(compressedDefinition, that.compressedDefinition) - && Objects.equals(tags, that.tags) - && Objects.equals(input, that.input) - && Objects.equals(modelSize, that.modelSize) - && Objects.equals(estimatedOperations, that.estimatedOperations) - && Objects.equals(licenseLevel, that.licenseLevel) - && Objects.equals(defaultFieldMap, that.defaultFieldMap) - && Objects.equals(inferenceConfig, that.inferenceConfig) - && Objects.equals(metadata, that.metadata) - && Objects.equals(location, that.location); - } - - @Override - public int hashCode() { - return Objects.hash( - modelId, - modelType, - createdBy, - version, - createTime, - definition, - compressedDefinition, - 
description, - tags, - modelSize, - estimatedOperations, - metadata, - licenseLevel, - input, - inferenceConfig, - defaultFieldMap, - location - ); - } - - public static class Builder { - - private String modelId; - private TrainedModelType modelType; - private String createdBy; - private Version version; - private String description; - private Instant createTime; - private Map metadata; - private List tags; - private TrainedModelDefinition definition; - private String compressedDefinition; - private TrainedModelInput input; - private Long modelSize; - private Long estimatedOperations; - private String licenseLevel; - private Map defaultFieldMap; - private InferenceConfig inferenceConfig; - private TrainedModelLocation location; - - public Builder setModelId(String modelId) { - this.modelId = modelId; - return this; - } - - public Builder setModelType(String modelType) { - this.modelType = TrainedModelType.fromString(modelType); - return this; - } - - public Builder setModelType(TrainedModelType modelType) { - this.modelType = modelType; - return this; - } - - private Builder setCreatedBy(String createdBy) { - this.createdBy = createdBy; - return this; - } - - private Builder setVersion(Version version) { - this.version = version; - return this; - } - - private Builder setVersion(String version) { - return this.setVersion(Version.fromString(version)); - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - private Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - public Builder setTags(List tags) { - this.tags = tags; - return this; - } - - public Builder setTags(String... tags) { - return setTags(Arrays.asList(tags)); - } - - public Builder setMetadata(Map metadata) { - this.metadata = metadata; - return this; - } - - public Builder setDefinition(TrainedModelDefinition.Builder definition) { - this.definition = definition == null ? 
null : definition.build(); - return this; - } - - public Builder setCompressedDefinition(String compressedDefinition) { - this.compressedDefinition = compressedDefinition; - return this; - } - - public Builder setDefinition(TrainedModelDefinition definition) { - this.definition = definition; - return this; - } - - public Builder setLocation(TrainedModelLocation location) { - this.location = location; - return this; - } - - public Builder setInput(TrainedModelInput input) { - this.input = input; - return this; - } - - private Builder setModelSize(Long modelSize) { - this.modelSize = modelSize; - return this; - } - - private Builder setEstimatedOperations(Long estimatedOperations) { - this.estimatedOperations = estimatedOperations; - return this; - } - - private Builder setLicenseLevel(String licenseLevel) { - this.licenseLevel = licenseLevel; - return this; - } - - public Builder setDefaultFieldMap(Map defaultFieldMap) { - this.defaultFieldMap = defaultFieldMap; - return this; - } - - public Builder setInferenceConfig(InferenceConfig inferenceConfig) { - this.inferenceConfig = inferenceConfig; - return this; - } - - public TrainedModelConfig build() { - return new TrainedModelConfig( - modelId, - modelType, - createdBy, - version, - description, - createTime, - definition, - compressedDefinition, - tags, - metadata, - input, - modelSize, - estimatedOperations, - licenseLevel, - defaultFieldMap, - inferenceConfig, - location - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java deleted file mode 100644 index 3ca84bc62cbd5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
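A minimal sketch of assembling a config through the Builder above (model id, description, tag, and field names are hypothetical values; every setter used is defined in the class being deleted here):

    TrainedModelConfig config = TrainedModelConfig.builder()
        .setModelId("my-model")                        // hypothetical id
        .setModelType(TrainedModelType.TREE_ENSEMBLE)
        .setDescription("example model")
        .setTags("example")
        .setInput(new TrainedModelInput("field_1", "field_2"))
        .build();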
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class TrainedModelDefinition implements ToXContentObject { - - public static final String NAME = "trained_model_doc"; - - public static final ParseField TRAINED_MODEL = new ParseField("trained_model"); - public static final ParseField PREPROCESSORS = new ParseField("preprocessors"); - - public static final ObjectParser<TrainedModelDefinition.Builder, Void> PARSER = new ObjectParser<>(NAME, true, TrainedModelDefinition.Builder::new); - static { - PARSER.declareNamedObject( - TrainedModelDefinition.Builder::setTrainedModel, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - TRAINED_MODEL - ); - PARSER.declareNamedObjects( - TrainedModelDefinition.Builder::setPreProcessors, - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (trainedModelDefBuilder) -> {/* Does not matter client side*/ }, - PREPROCESSORS - ); - } - - public static TrainedModelDefinition.Builder fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final TrainedModel trainedModel; - private final List<PreProcessor> preProcessors; - - TrainedModelDefinition(TrainedModel trainedModel, List<PreProcessor> preProcessors) { - this.trainedModel = trainedModel; - this.preProcessors = preProcessors == null ? 
Collections.emptyList() : Collections.unmodifiableList(preProcessors); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - NamedXContentObjectHelper.writeNamedObjects( - builder, - params, - false, - TRAINED_MODEL.getPreferredName(), - Collections.singletonList(trainedModel) - ); - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PREPROCESSORS.getPreferredName(), preProcessors); - builder.endObject(); - return builder; - } - - public TrainedModel getTrainedModel() { - return trainedModel; - } - - public List<PreProcessor> getPreProcessors() { - return preProcessors; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelDefinition that = (TrainedModelDefinition) o; - return Objects.equals(trainedModel, that.trainedModel) && Objects.equals(preProcessors, that.preProcessors); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModel, preProcessors); - } - - public static class Builder { - - private List<PreProcessor> preProcessors; - private TrainedModel trainedModel; - - public Builder setPreProcessors(List<PreProcessor> preProcessors) { - this.preProcessors = preProcessors; - return this; - } - - public Builder setTrainedModel(TrainedModel trainedModel) { - this.trainedModel = trainedModel; - return this; - } - - public TrainedModelDefinition build() { - return new TrainedModelDefinition(this.trainedModel, this.preProcessors); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java deleted file mode 100644 index d6e2d0559396c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class TrainedModelInput implements ToXContentObject { - - public static final String NAME = "trained_model_config_input"; - public static final ParseField FIELD_NAMES = new ParseField("field_names"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser<TrainedModelInput, Void> PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TrainedModelInput((List<String>) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), FIELD_NAMES); - } - - private final List<String> fieldNames; - - public TrainedModelInput(List<String> fieldNames) { - this.fieldNames = fieldNames; - } - - public TrainedModelInput(String... 
fieldNames) { - this(Arrays.asList(fieldNames)); - } - - public static TrainedModelInput fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public List<String> getFieldNames() { - return fieldNames; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (fieldNames != null) { - builder.field(FIELD_NAMES.getPreferredName(), fieldNames); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelInput that = (TrainedModelInput) o; - return Objects.equals(fieldNames, that.fieldNames); - } - - @Override - public int hashCode() { - return Objects.hash(fieldNames); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java deleted file mode 100644 index bd45805e70603..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceStats; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.ingest.IngestStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class TrainedModelStats implements ToXContentObject { - - public static final ParseField MODEL_ID = new ParseField("model_id"); - public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count"); - public static final ParseField INGEST_STATS = new ParseField("ingest"); - public static final ParseField INFERENCE_STATS = new ParseField("inference_stats"); - - private final String modelId; - private final Map<String, Object> ingestStats; - private final int pipelineCount; - private final InferenceStats inferenceStats; - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser<TrainedModelStats, Void> PARSER = new ConstructingObjectParser<>( - "trained_model_stats", - true, - args -> new TrainedModelStats((String) args[0], (Map<String, Object>) args[1], (Integer) args[2], (InferenceStats) args[3]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_ID); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), INGEST_STATS); - PARSER.declareInt(constructorArg(), PIPELINE_COUNT); - PARSER.declareObject(optionalConstructorArg(), InferenceStats.PARSER, INFERENCE_STATS); - } - - public static TrainedModelStats fromXContent(XContentParser parser) { - return 
PARSER.apply(parser, null); - } - - public TrainedModelStats(String modelId, Map<String, Object> ingestStats, int pipelineCount, InferenceStats inferenceStats) { - this.modelId = modelId; - this.ingestStats = ingestStats; - this.pipelineCount = pipelineCount; - this.inferenceStats = inferenceStats; - } - - /** - * The model id for which the stats apply - */ - public String getModelId() { - return modelId; - } - - /** - * Ingest level statistics. See {@link IngestStats#toXContent(XContentBuilder, Params)} for fields and format - * If there are no ingest pipelines referencing the model, then the ingest statistics could be null. - */ - @Nullable - public Map<String, Object> getIngestStats() { - return ingestStats; - } - - /** - * The total number of pipelines that reference the trained model - */ - public int getPipelineCount() { - return pipelineCount; - } - - /** - * Inference statistics - */ - public InferenceStats getInferenceStats() { - return inferenceStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODEL_ID.getPreferredName(), modelId); - builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount); - if (ingestStats != null) { - builder.field(INGEST_STATS.getPreferredName(), ingestStats); - } - if (inferenceStats != null) { - builder.field(INFERENCE_STATS.getPreferredName(), inferenceStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(modelId, ingestStats, pipelineCount, inferenceStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TrainedModelStats other = (TrainedModelStats) obj; - return Objects.equals(this.modelId, other.modelId) - && Objects.equals(this.ingestStats, other.ingestStats) - && Objects.equals(this.pipelineCount, other.pipelineCount) - && Objects.equals(this.inferenceStats, other.inferenceStats); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java deleted file mode 100644 index e34c01d880b87..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
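A minimal sketch of consuming the stats parsed above (the parser variable is assumed to be positioned on a stats object in a GET trained model stats response):

    TrainedModelStats stats = TrainedModelStats.fromXContent(parser);
    int pipelines = stats.getPipelineCount();
    Map<String, Object> ingest = stats.getIngestStats(); // may be null when no pipeline references the model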
- */ - -package org.elasticsearch.client.ml.inference; - -import java.util.Locale; - -public enum TrainedModelType { - TREE_ENSEMBLE, - LANG_IDENT, - PYTORCH; - - public static TrainedModelType fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java deleted file mode 100644 index 3ad8170b3ce9f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * This is a pre-processor that embeds text into a numerical vector. - * - * It calculates a set of features based on script type, ngram hashes, and most common script values. - * - * The features are then concatenated with specific quantization scales and weights into a vector of length 80. 
- * - * This is a fork and a port of: https://github.com/google/cld3/blob/06f695f1c8ee530104416aab5dcf2d6a1414a56a/src/embedding_network.cc - */ -public class CustomWordEmbedding implements PreProcessor { - - public static final String NAME = "custom_word_embedding"; - static final ParseField FIELD = new ParseField("field"); - static final ParseField DEST_FIELD = new ParseField("dest_field"); - static final ParseField EMBEDDING_WEIGHTS = new ParseField("embedding_weights"); - static final ParseField EMBEDDING_QUANT_SCALES = new ParseField("embedding_quant_scales"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new CustomWordEmbedding((short[][]) a[0], (byte[][]) a[1], (String) a[2], (String) a[3]) - ); - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - List> listOfListOfShorts = parseArrays(EMBEDDING_QUANT_SCALES.getPreferredName(), XContentParser::shortValue, p); - short[][] primitiveShorts = new short[listOfListOfShorts.size()][]; - int i = 0; - for (List shorts : listOfListOfShorts) { - short[] innerShorts = new short[shorts.size()]; - for (int j = 0; j < shorts.size(); j++) { - innerShorts[j] = shorts.get(j); - } - primitiveShorts[i++] = innerShorts; - } - return primitiveShorts; - }, EMBEDDING_QUANT_SCALES, ObjectParser.ValueType.VALUE_ARRAY); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - List values = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - values.add(p.binaryValue()); - } - byte[][] primitiveBytes = new byte[values.size()][]; - int i = 0; - for (byte[] bytes : values) { - primitiveBytes[i++] = bytes; - } - return primitiveBytes; - }, EMBEDDING_WEIGHTS, ObjectParser.ValueType.VALUE_ARRAY); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEST_FIELD); - } - - private static List> parseArrays( - String fieldName, - CheckedFunction fromParser, - XContentParser p - ) throws IOException { - if (p.currentToken() != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]"); - } - List> values = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - if (p.currentToken() != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]"); - } - List innerList = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - if (p.currentToken().isValue() == false) { - throw new IllegalStateException( - "expected non-null value but got [" + p.currentToken() + "] " + "for [" + fieldName + "]" - ); - } - innerList.add(fromParser.apply(p)); - } - values.add(innerList); - } - return values; - } - - public static CustomWordEmbedding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final short[][] embeddingsQuantScales; - private final byte[][] embeddingsWeights; - private final String fieldName; - private final String destField; - - CustomWordEmbedding(short[][] embeddingsQuantScales, byte[][] embeddingsWeights, String fieldName, String destField) { - this.embeddingsQuantScales = embeddingsQuantScales; - this.embeddingsWeights = embeddingsWeights; - this.fieldName = fieldName; - this.destField = destField; - } - - @Override - public String getName() { - return NAME; - } - - 
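    // For illustration: the static parser block above turns the parsed nested list for
    // embedding_quant_scales into a primitive short[][]. A standalone sketch of that
    // conversion (variable names here are illustrative, not from the original file):
    //
    //     List<List<Short>> rows = parsedRows;           // assume: parsed from the JSON array
    //     short[][] primitive = new short[rows.size()][];
    //     int i = 0;
    //     for (List<Short> row : rows) {
    //         short[] inner = new short[row.size()];
    //         for (int j = 0; j < row.size(); j++) {
    //             inner[j] = row.get(j);
    //         }
    //         primitive[i++] = inner;
    //     }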
@Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), fieldName); - builder.field(DEST_FIELD.getPreferredName(), destField); - builder.field(EMBEDDING_QUANT_SCALES.getPreferredName(), embeddingsQuantScales); - builder.field(EMBEDDING_WEIGHTS.getPreferredName(), embeddingsWeights); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CustomWordEmbedding that = (CustomWordEmbedding) o; - return Objects.equals(fieldName, that.fieldName) - && Objects.equals(destField, that.destField) - && Arrays.deepEquals(embeddingsWeights, that.embeddingsWeights) - && Arrays.deepEquals(embeddingsQuantScales, that.embeddingsQuantScales); - } - - @Override - public int hashCode() { - return Objects.hash(fieldName, destField, Arrays.deepHashCode(embeddingsQuantScales), Arrays.deepHashCode(embeddingsWeights)); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java deleted file mode 100644 index 81d3cfa05cb45..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for frequency encoding a set of categorical values for a given field. 
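For illustration, a frequency encoding could be assembled with the Builder defined below; the field name, feature name, and frequencies are hypothetical values:

    FrequencyEncoding encoding = new FrequencyEncoding.Builder("color")
        .setFeatureName("color_frequency")
        .addFrequency("red", 0.6)
        .addFrequency("blue", 0.3)
        .addFrequency("green", 0.1)
        .build();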
- */ -public class FrequencyEncoding implements PreProcessor { - - public static final String NAME = "frequency_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser<FrequencyEncoding, Void> PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new FrequencyEncoding((String) a[0], (String) a[1], (Map<String, Double>) a[2], (Boolean) a[3]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - FREQUENCY_MAP - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static FrequencyEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map<String, Double> frequencyMap; - private final Boolean custom; - - FrequencyEncoding(String field, String featureName, Map<String, Double> frequencyMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap)); - this.custom = custom; - } - - /** - * @return Field name on which to frequency encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: frequency for the frequency encoding - */ - public Map<String, Double> getFrequencyMap() { - return frequencyMap; - } - - /** - * @return The encoded feature name - */ - public String getFeatureName() { - return featureName; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FrequencyEncoding that = (FrequencyEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(custom, that.custom) - && Objects.equals(frequencyMap, that.frequencyMap); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, frequencyMap, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map<String, Double> frequencyMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public 
Builder setFrequencyMap(Map<String, Double> frequencyMap) { - this.frequencyMap = new HashMap<>(frequencyMap); - return this; - } - - public Builder addFrequency(String valueName, double frequency) { - this.frequencyMap.put(valueName, frequency); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public FrequencyEncoding build() { - return new FrequencyEncoding(field, featureName, frequencyMap, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java deleted file mode 100644 index bf9f1aba2c057..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * Multi-PreProcessor for chaining together multiple processors - */ -public class Multi implements PreProcessor { - - public static final String NAME = "multi_encoding"; - public static final ParseField PROCESSORS = new ParseField("processors"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser<Multi, Void> PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Multi((List<PreProcessor>) a[0], (Boolean) a[1]) - ); - static { - PARSER.declareNamedObjects( - ConstructingObjectParser.constructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (_unused) -> {/* Does not matter client side*/ }, - PROCESSORS - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static Multi fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List<PreProcessor> processors; - private final Boolean custom; - - Multi(List<PreProcessor> processors, Boolean custom) { - this.processors = Objects.requireNonNull(processors, PROCESSORS.getPreferredName()); - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PROCESSORS.getPreferredName(), processors); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Multi multi = (Multi) o; - return Objects.equals(multi.processors, processors) && 
Objects.equals(custom, multi.custom); - } - - @Override - public int hashCode() { - return Objects.hash(custom, processors); - } - - public static Builder builder(List processors) { - return new Builder(processors); - } - - public static class Builder { - private final List processors; - private Boolean custom; - - public Builder(List processors) { - this.processors = processors; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Multi build() { - return new Multi(processors, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java deleted file mode 100644 index bd831a6bf8d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.function.IntFunction; -import java.util.stream.IntStream; - -/** - * PreProcessor for n-gram encoding a string - */ -public class NGram implements PreProcessor { - - public static final String NAME = "n_gram_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_PREFIX = new ParseField("feature_prefix"); - public static final ParseField NGRAMS = new ParseField("n_grams"); - public static final ParseField START = new ParseField("start"); - public static final ParseField LENGTH = new ParseField("length"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser( - NAME, - true, - a -> new NGram((String) a[0], (List) a[1], (Integer) a[2], (Integer) a[3], (Boolean) a[4], (String) a[5]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareIntArray(ConstructingObjectParser.constructorArg(), NGRAMS); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), START); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), LENGTH); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEATURE_PREFIX); - } - - public static NGram fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featurePrefix; - private final List nGrams; - private final Integer start; - private final Integer length; - private final Boolean custom; - - NGram(String field, List nGrams, Integer start, Integer length, Boolean custom, String 
featurePrefix) { - this.field = field; - this.featurePrefix = featurePrefix; - this.nGrams = nGrams; - this.start = start; - this.length = length; - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field(FIELD.getPreferredName(), field); - } - if (featurePrefix != null) { - builder.field(FEATURE_PREFIX.getPreferredName(), featurePrefix); - } - if (nGrams != null) { - builder.field(NGRAMS.getPreferredName(), nGrams); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (length != null) { - builder.field(LENGTH.getPreferredName(), length); - } - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - public String getField() { - return field; - } - - public String getFeaturePrefix() { - return featurePrefix; - } - - public List getnGrams() { - return nGrams; - } - - public Integer getStart() { - return start; - } - - public Integer getLength() { - return length; - } - - public Boolean getCustom() { - return custom; - } - - public List outputFields() { - return allPossibleNGramOutputFeatureNames(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NGram nGram = (NGram) o; - return Objects.equals(field, nGram.field) - && Objects.equals(featurePrefix, nGram.featurePrefix) - && Objects.equals(nGrams, nGram.nGrams) - && Objects.equals(start, nGram.start) - && Objects.equals(length, nGram.length) - && Objects.equals(custom, nGram.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featurePrefix, start, length, custom, nGrams); - } - - private String nGramFeature(int nGram, int pos) { - return featurePrefix + "." 
+ nGram + pos; - } - - private List allPossibleNGramOutputFeatureNames() { - int totalNgrams = 0; - for (int nGram : nGrams) { - totalNgrams += (length - (nGram - 1)); - } - if (totalNgrams <= 0) { - return Collections.emptyList(); - } - List ngramOutputs = new ArrayList<>(totalNgrams); - - for (int nGram : nGrams) { - IntFunction func = i -> nGramFeature(nGram, i); - IntStream.range(0, (length - (nGram - 1))).mapToObj(func).forEach(ngramOutputs::add); - } - return ngramOutputs; - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private String featurePrefix; - private List nGrams; - private Integer start; - private Integer length; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Builder setFeaturePrefix(String featurePrefix) { - this.featurePrefix = featurePrefix; - return this; - } - - public Builder setnGrams(List nGrams) { - this.nGrams = nGrams; - return this; - } - - public Builder setStart(Integer start) { - this.start = start; - return this; - } - - public Builder setLength(Integer length) { - this.length = length; - return this; - } - - public Builder setCustom(Boolean custom) { - this.custom = custom; - return this; - } - - public NGram build() { - return new NGram(field, nGrams, start, length, custom, featurePrefix); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java deleted file mode 100644 index 461c62fd54c0d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for one hot encoding a set of categorical values for a given field. 
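For illustration, a one hot encoding could be assembled with the Builder defined below; the field name and category-to-column mapping are hypothetical values:

    OneHotEncoding encoding = new OneHotEncoding.Builder("color")
        .addOneHot("red", "color_red")
        .addOneHot("blue", "color_blue")
        .build();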
- */ -public class OneHotEncoding implements PreProcessor { - - public static final String NAME = "one_hot_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField HOT_MAP = new ParseField("hot_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new OneHotEncoding((String) a[0], (Map) a[1], (Boolean) a[2]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static OneHotEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final Map hotMap; - private final Boolean custom; - - OneHotEncoding(String field, Map hotMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap)); - this.custom = custom; - } - - /** - * @return Field name on which to one hot encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: ColumnName for the one hot encoding - */ - public Map getHotMap() { - return hotMap; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(HOT_MAP.getPreferredName(), hotMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OneHotEncoding that = (OneHotEncoding) o; - return Objects.equals(field, that.field) && Objects.equals(hotMap, that.hotMap) && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, hotMap, custom); - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private Map hotMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setHotMap(Map hotMap) { - this.hotMap = new HashMap<>(hotMap); - return this; - } - - public Builder addOneHot(String valueName, String oneHotFeatureName) { - this.hotMap.put(valueName, oneHotFeatureName); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public OneHotEncoding build() { - return new OneHotEncoding(field, hotMap, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java deleted file mode 100644 index 713b2a67ba3de..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for target mean encoding a set of categorical values for a given field. - */ -public class TargetMeanEncoding implements PreProcessor { - - public static final String NAME = "target_mean_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField TARGET_MAP = new ParseField("target_map"); - public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TargetMeanEncoding((String) a[0], (String) a[1], (Map) a[2], (Double) a[3], (Boolean) a[4]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - TARGET_MAP - ); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static TargetMeanEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map meanMap; - private final double defaultValue; - private final Boolean custom; - - TargetMeanEncoding(String field, String featureName, Map meanMap, Double defaultValue, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap)); - this.defaultValue = Objects.requireNonNull(defaultValue); - this.custom = custom; - } - - /** - * @return Field name on which to target mean encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: targetMean for the target mean encoding - */ - public Map getMeanMap() { - return meanMap; - } - - /** - * @return The default value to set when a previously unobserved value is seen - */ - public double getDefaultValue() { - return defaultValue; - } - - /** - * @return The feature name for the encoded value - */ - public String getFeatureName() { - return featureName; - } - - public Boolean getCustom() { - return custom; - } - - @Override - 
public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(TARGET_MAP.getPreferredName(), meanMap); - builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TargetMeanEncoding that = (TargetMeanEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(meanMap, that.meanMap) - && Objects.equals(defaultValue, that.defaultValue) - && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, meanMap, defaultValue, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map meanMap = new HashMap<>(); - private double defaultValue; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public String getField() { - return field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public Builder setMeanMap(Map meanMap) { - this.meanMap = meanMap; - return this; - } - - public Builder addMeanMapEntry(String valueName, double meanEncoding) { - this.meanMap.put(valueName, meanEncoding); - return this; - } - - public Builder setDefaultValue(double defaultValue) { - this.defaultValue = defaultValue; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public TargetMeanEncoding build() { - return new TargetMeanEncoding(field, featureName, meanMap, defaultValue, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java deleted file mode 100644 index d68dfc88488a7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
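A minimal sketch of the TargetMeanEncoding Builder shown above (field, feature name, per-category means, and default value are hypothetical):

    TargetMeanEncoding encoding = new TargetMeanEncoding.Builder("color")
        .setFeatureName("color_target_mean")
        .addMeanMapEntry("red", 0.42)
        .addMeanMapEntry("blue", 0.17)
        .setDefaultValue(0.25)
        .build();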
- */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class FeatureImportance implements ToXContentObject { - - public static final String IMPORTANCE = "importance"; - public static final String FEATURE_NAME = "feature_name"; - public static final String CLASSES = "classes"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_importance", - true, - a -> new FeatureImportance((String) a[0], (Double) a[1], (List) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), new ParseField(FeatureImportance.FEATURE_NAME)); - PARSER.declareDouble(optionalConstructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ClassImportance.fromXContent(p), - new ParseField(FeatureImportance.CLASSES) - ); - } - - public static FeatureImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List classImportance; - private final Double importance; - private final String featureName; - - public FeatureImportance(String featureName, Double importance, List classImportance) { - this.featureName = Objects.requireNonNull(featureName); - this.importance = importance; - this.classImportance = classImportance == null ? 
null : Collections.unmodifiableList(classImportance); - } - - public List getClassImportance() { - return classImportance; - } - - public Double getImportance() { - return importance; - } - - public String getFeatureName() { - return featureName; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FEATURE_NAME, featureName); - if (importance != null) { - builder.field(IMPORTANCE, importance); - } - if (classImportance != null && classImportance.isEmpty() == false) { - builder.field(CLASSES, classImportance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - FeatureImportance that = (FeatureImportance) object; - return Objects.equals(featureName, that.featureName) - && Objects.equals(importance, that.importance) - && Objects.equals(classImportance, that.classImportance); - } - - @Override - public int hashCode() { - return Objects.hash(featureName, importance, classImportance); - } - - public static class ClassImportance implements ToXContentObject { - - static final String CLASS_NAME = "class_name"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_importance_class_importance", - true, - a -> new ClassImportance(a[0], (Double) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { - return p.booleanValue(); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, new ParseField(CLASS_NAME), ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - } - - public static ClassImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Object className; - private final double importance; - - public ClassImportance(Object className, double importance) { - this.className = className; - this.importance = importance; - } - - public Object getClassName() { - return className; - } - - public double getImportance() { - return importance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME, className); - builder.field(IMPORTANCE, importance); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassImportance that = (ClassImportance) o; - return Double.compare(that.importance, importance) == 0 && Objects.equals(className, that.className); - } - - @Override - public int hashCode() { - return Objects.hash(className, importance); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java deleted file mode 100644 index 889677f6dd99f..0000000000000 --- 
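For context on what the FeatureImportance deletion above removes from the high-level REST client: it modeled one entry of an inference response's feature importance array, with optional per-class breakdowns for classification. A minimal sketch of constructing one, assuming the pre-removal client is still on the classpath (field values here are made up):

import org.elasticsearch.client.ml.inference.results.FeatureImportance;
import org.elasticsearch.client.ml.inference.results.FeatureImportance.ClassImportance;

import java.util.List;

public class FeatureImportanceSketch {
    public static void main(String[] args) {
        // feature_name is required; the overall importance and the per-class
        // entries are both optional in the parser above.
        FeatureImportance importance = new FeatureImportance(
            "airline",                                      // hypothetical feature name
            0.42,                                           // overall importance
            List.of(new ClassImportance("delayed", 0.42))   // hypothetical class entry
        );
        System.out.println(importance.getFeatureName() + " -> " + importance.getImportance());
    }
}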
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class TopClassEntry implements ToXContentObject { - - public static final ParseField CLASS_NAME = new ParseField("class_name"); - public static final ParseField CLASS_PROBABILITY = new ParseField("class_probability"); - public static final ParseField CLASS_SCORE = new ParseField("class_score"); - - public static final String NAME = "top_class"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TopClassEntry(a[0], (Double) a[1], (Double) a[2]) - ); - - static { - PARSER.declareField(constructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" + NAME + "] failed to parse field [" + CLASS_NAME + "] value [" + token + "] is not a string, boolean or number" - ); - } - return o; - }, CLASS_NAME, ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), CLASS_PROBABILITY); - PARSER.declareDouble(constructorArg(), CLASS_SCORE); - } - - public static TopClassEntry fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Object classification; - private final double probability; - private final double score; - - public TopClassEntry(Object classification, double probability, double score) { - this.classification = Objects.requireNonNull(classification); - this.probability = probability; - this.score = score; - } - - public Object getClassification() { - return classification; - } - - public double getProbability() { - return probability; - } - - public double getScore() { - return score; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), classification); - builder.field(CLASS_PROBABILITY.getPreferredName(), probability); - builder.field(CLASS_SCORE.getPreferredName(), score); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - 
return false; - } - TopClassEntry that = (TopClassEntry) object; - return Objects.equals(classification, that.classification) && probability == that.probability && score == that.score; - } - - @Override - public int hashCode() { - return Objects.hash(classification, probability, score); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java deleted file mode 100644 index 1e63677bfd70b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class ClassificationConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("classification"); - - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - public static final ParseField TOP_CLASSES_RESULTS_FIELD = new ParseField("top_classes_results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private final Integer numTopClasses; - private final String topClassesResultsField; - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new ClassificationConfig((Integer) args[0], (String) args[1], (String) args[2], (Integer) args[3]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static ClassificationConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public ClassificationConfig() { - this(null, null, null, null); - } - - public ClassificationConfig(Integer numTopClasses, String resultsField, String topClassesResultsField, Integer featureImportance) { - this.numTopClasses = numTopClasses; - this.topClassesResultsField = topClassesResultsField; - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = featureImportance; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public String getTopClassesResultsField() { - return topClassesResultsField; - } - - public String getResultsField() 
{ - return resultsField; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassificationConfig that = (ClassificationConfig) o; - return Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(topClassesResultsField, that.topClassesResultsField) - && Objects.equals(resultsField, that.resultsField) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(numTopClasses, topClassesResultsField, resultsField, numTopFeatureImportanceValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (topClassesResultsField != null) { - builder.field(TOP_CLASSES_RESULTS_FIELD.getPreferredName(), topClassesResultsField); - } - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java deleted file mode 100644 index 44bcac9e67554..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
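The ClassificationConfig removed above was the request-time configuration for classification inference; its toXContent writes only non-null fields, so an empty config defers entirely to server-side defaults. A small usage sketch (all values illustrative):

import org.elasticsearch.client.ml.inference.trainedmodel.ClassificationConfig;

public class ClassificationConfigSketch {
    public static void main(String[] args) {
        // Top 3 classes written under "top_classes", predictions under
        // "prediction", and importance reported for 5 features.
        ClassificationConfig config = new ClassificationConfig(3, "prediction", "top_classes", 5);
        System.out.println(config.getNumTopClasses()); // 3
    }
}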
- */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class IndexLocation implements TrainedModelLocation { - - public static final String INDEX = "index"; - private static final ParseField NAME = new ParseField("name"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - INDEX, - true, - a -> new IndexLocation((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - } - - public static IndexLocation fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final String index; - - public IndexLocation(String index) { - this.index = Objects.requireNonNull(index); - } - - public String getIndex() { - return index; - } - - @Override - public String getName() { - return INDEX; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NAME.getPreferredName(), index); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IndexLocation that = (IndexLocation) o; - return Objects.equals(index, that.index); - } - - @Override - public int hashCode() { - return Objects.hash(index); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java deleted file mode 100644 index 1b444cc14cbe2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface InferenceConfig extends NamedXContentObject { - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java deleted file mode 100644 index d668f7a2aa6fc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class InferenceStats implements ToXContentObject { - - public static final String NAME = "inference_stats"; - public static final ParseField MISSING_ALL_FIELDS_COUNT = new ParseField("missing_all_fields_count"); - public static final ParseField INFERENCE_COUNT = new ParseField("inference_count"); - public static final ParseField CACHE_MISS_COUNT = new ParseField("cache_miss_count"); - public static final ParseField FAILURE_COUNT = new ParseField("failure_count"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new InferenceStats((Long) a[0], (Long) a[1], (Long) a[2], (Long) a[3], (Instant) a[4]) - ); - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_ALL_FIELDS_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INFERENCE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILURE_COUNT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CACHE_MISS_COUNT); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - } - - private final long missingAllFieldsCount; - private final long inferenceCount; - private final long failureCount; - private final long cacheMissCount; - private final Instant timeStamp; - - private InferenceStats(Long missingAllFieldsCount, Long inferenceCount, Long failureCount, Long cacheMissCount, Instant instant) { - this( - unboxOrZero(missingAllFieldsCount), - unboxOrZero(inferenceCount), - unboxOrZero(failureCount), - unboxOrZero(cacheMissCount), - instant - ); - } - - public InferenceStats(long missingAllFieldsCount, long inferenceCount, long failureCount, long cacheMissCount, Instant timeStamp) { - this.missingAllFieldsCount = missingAllFieldsCount; - this.inferenceCount = inferenceCount; - this.failureCount = failureCount; - this.cacheMissCount = cacheMissCount; - this.timeStamp = timeStamp == null - ? Instant.ofEpochMilli(Instant.now().toEpochMilli()) - : Instant.ofEpochMilli(timeStamp.toEpochMilli()); - } - - /** - * How many times this model attempted to infer with all its fields missing - */ - public long getMissingAllFieldsCount() { - return missingAllFieldsCount; - } - - /** - * How many inference calls were made against this model - */ - public long getInferenceCount() { - return inferenceCount; - } - - /** - * How many inference failures occurred. - */ - public long getFailureCount() { - return failureCount; - } - - /** - * How many cache misses occurred when inferring this model - */ - public long getCacheMissCount() { - return cacheMissCount; - } - - /** - * The timestamp of these statistics. 
- */ - public Instant getTimeStamp() { - return timeStamp; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FAILURE_COUNT.getPreferredName(), failureCount); - builder.field(INFERENCE_COUNT.getPreferredName(), inferenceCount); - builder.field(CACHE_MISS_COUNT.getPreferredName(), cacheMissCount); - builder.field(MISSING_ALL_FIELDS_COUNT.getPreferredName(), missingAllFieldsCount); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timeStamp.toEpochMilli()); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - InferenceStats that = (InferenceStats) o; - return missingAllFieldsCount == that.missingAllFieldsCount - && inferenceCount == that.inferenceCount - && failureCount == that.failureCount - && cacheMissCount == that.cacheMissCount - && Objects.equals(timeStamp, that.timeStamp); - } - - @Override - public int hashCode() { - return Objects.hash(missingAllFieldsCount, inferenceCount, failureCount, cacheMissCount, timeStamp); - } - - @Override - public String toString() { - return "InferenceStats{" - + "missingAllFieldsCount=" - + missingAllFieldsCount - + ", inferenceCount=" - + inferenceCount - + ", failureCount=" - + failureCount - + ", cacheMissCount=" - + cacheMissCount - + ", timeStamp=" - + timeStamp - + '}'; - } - - private static long unboxOrZero(@Nullable Long value) { - return value == null ? 0L : value; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java deleted file mode 100644 index 496cceb4e5a17..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
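One subtlety in the InferenceStats constructor above: timestamps are normalized to millisecond precision, matching what its toXContent emits (epoch milliseconds), presumably so a serialize/parse round trip compares equal. A standalone illustration of that truncation using only the JDK:

import java.time.Instant;

public class TimestampTruncation {
    public static void main(String[] args) {
        Instant fine = Instant.parse("2022-02-01T00:00:00.123456789Z");
        // The same normalization the removed constructor applied to its timestamp.
        Instant stored = Instant.ofEpochMilli(fine.toEpochMilli());
        System.out.println(stored); // 2022-02-01T00:00:00.123Z
    }
}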
- */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class RegressionConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("regression"); - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new RegressionConfig((String) args[0], (Integer) args[1]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static RegressionConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - public RegressionConfig() { - this(null, null); - } - - public RegressionConfig(String resultsField, Integer numTopFeatureImportanceValues) { - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionConfig that = (RegressionConfig) o; - return Objects.equals(this.resultsField, that.resultsField) - && Objects.equals(this.numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(resultsField, numTopFeatureImportanceValues); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java deleted file mode 100644 index 4f5ce1aecadcc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference.trainedmodel;
-
-import org.elasticsearch.xcontent.ParseField;
-
-import java.util.Locale;
-
-public enum TargetType {
-
-    REGRESSION,
-    CLASSIFICATION;
-
-    public static final ParseField TARGET_TYPE = new ParseField("target_type");
-
-    public static TargetType fromString(String name) {
-        return valueOf(name.trim().toUpperCase(Locale.ROOT));
-    }
-
-    @Override
-    public String toString() {
-        return name().toLowerCase(Locale.ROOT);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java
deleted file mode 100644
index 76d5538708f52..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference.trainedmodel;
-
-import org.elasticsearch.client.ml.inference.NamedXContentObject;
-
-import java.util.List;
-
-public interface TrainedModel extends NamedXContentObject {
-
-    /**
-     * @return List of featureNames expected by the model. In the order that they are expected
-     */
-    List<String> getFeatureNames();
-
-    /**
-     * @return The name of the model
-     */
-    String getName();
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java
deleted file mode 100644
index cb7d9a0f8f211..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml.inference.trainedmodel;
-
-import org.elasticsearch.client.ml.inference.NamedXContentObject;
-
-public interface TrainedModelLocation extends NamedXContentObject {}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java
deleted file mode 100644
index c19e50b46c824..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements.
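TargetType's fromString/toString pair above is deliberately forgiving on input and lowercase on output, matching the wire format the server expects:

import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;

public class TargetTypeSketch {
    public static void main(String[] args) {
        // Input is trimmed and upper-cased before the enum lookup.
        TargetType type = TargetType.fromString(" Classification ");
        System.out.println(type); // prints "classification"
    }
}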
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class Ensemble implements TrainedModel { - - public static final String NAME = "ensemble"; - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TRAINED_MODELS = new ParseField("trained_models"); - public static final ParseField AGGREGATE_OUTPUT = new ParseField("aggregate_output"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - public static final ParseField CLASSIFICATION_WEIGHTS = new ParseField("classification_weights"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Ensemble.Builder::new); - - static { - PARSER.declareStringArray(Ensemble.Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareNamedObjects( - Ensemble.Builder::setTrainedModels, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - (ensembleBuilder) -> { /* Noop does not matter client side */ }, - TRAINED_MODELS - ); - PARSER.declareNamedObject( - Ensemble.Builder::setOutputAggregator, - (p, c, n) -> p.namedObject(OutputAggregator.class, n, null), - AGGREGATE_OUTPUT - ); - PARSER.declareString(Ensemble.Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Ensemble.Builder::setClassificationLabels, CLASSIFICATION_LABELS); - PARSER.declareDoubleArray(Ensemble.Builder::setClassificationWeights, CLASSIFICATION_WEIGHTS); - } - - public static Ensemble fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List models; - private final OutputAggregator outputAggregator; - private final TargetType targetType; - private final List classificationLabels; - private final double[] classificationWeights; - - Ensemble( - List featureNames, - List models, - @Nullable OutputAggregator outputAggregator, - TargetType targetType, - @Nullable List classificationLabels, - @Nullable double[] classificationWeights - ) { - this.featureNames = featureNames; - this.models = models; - this.outputAggregator = outputAggregator; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - this.classificationWeights = classificationWeights; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (featureNames != null && featureNames.isEmpty() == false) { - 
builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (models != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, TRAINED_MODELS.getPreferredName(), models); - } - if (outputAggregator != null) { - NamedXContentObjectHelper.writeNamedObjects( - builder, - params, - false, - AGGREGATE_OUTPUT.getPreferredName(), - Collections.singletonList(outputAggregator) - ); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType); - } - if (classificationLabels != null) { - builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (classificationWeights != null) { - builder.field(CLASSIFICATION_WEIGHTS.getPreferredName(), classificationWeights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Ensemble that = (Ensemble) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(models, that.models) - && Objects.equals(targetType, that.targetType) - && Objects.equals(classificationLabels, that.classificationLabels) - && Arrays.equals(classificationWeights, that.classificationWeights) - && Objects.equals(outputAggregator, that.outputAggregator); - } - - @Override - public int hashCode() { - return Objects.hash( - featureNames, - models, - outputAggregator, - classificationLabels, - targetType, - Arrays.hashCode(classificationWeights) - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames = Collections.emptyList(); - private List trainedModels; - private OutputAggregator outputAggregator; - private TargetType targetType; - private List classificationLabels; - private double[] classificationWeights; - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder setTrainedModels(List trainedModels) { - this.trainedModels = trainedModels; - return this; - } - - public Builder setOutputAggregator(OutputAggregator outputAggregator) { - this.outputAggregator = outputAggregator; - return this; - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - public Builder setClassificationWeights(List classificationWeights) { - this.classificationWeights = classificationWeights.stream().mapToDouble(Double::doubleValue).toArray(); - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - public Ensemble build() { - return new Ensemble(featureNames, trainedModels, outputAggregator, targetType, classificationLabels, classificationWeights); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java deleted file mode 100644 index 22fc6f7ef3f55..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
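For reference, Ensemble.Builder above composed member models with an output aggregator. A minimal regression ensemble under the pre-removal API, averaging two trivial single-leaf trees with weighted_sum (the Tree deletion appears further down in this diff; names like "f0" are made up):

import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum;
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;

import java.util.List;

public class EnsembleSketch {
    public static void main(String[] args) {
        // Tree.builder() starts out as a single root leaf, so these are constant models.
        List<TrainedModel> models = List.of(
            Tree.builder().setFeatureNames(List.of("f0")).build(),
            Tree.builder().setFeatureNames(List.of("f0")).build()
        );
        Ensemble ensemble = Ensemble.builder()
            .setFeatureNames(List.of("f0"))
            .setTrainedModels(models)
            .setOutputAggregator(new WeightedSum(List.of(0.5, 0.5))) // equal weights
            .setTargetType(TargetType.REGRESSION)
            .build();
        System.out.println(ensemble.getName()); // "ensemble"
    }
}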
Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference.trainedmodel.ensemble;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-public class Exponent implements OutputAggregator {
-
-    public static final String NAME = "exponent";
-    public static final ParseField WEIGHTS = new ParseField("weights");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<Exponent, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new Exponent((List<Double>) a[0])
-    );
-    static {
-        PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS);
-    }
-
-    public static Exponent fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final List<Double> weights;
-
-    public Exponent(List<Double> weights) {
-        this.weights = weights;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (weights != null) {
-            builder.field(WEIGHTS.getPreferredName(), weights);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Exponent that = (Exponent) o;
-        return Objects.equals(weights, that.weights);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(weights);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java
deleted file mode 100644
index 19693a728d2ee..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class LogisticRegression implements OutputAggregator { - - public static final String NAME = "logistic_regression"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LogisticRegression((List) a[0]) - ); - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static LogisticRegression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public LogisticRegression(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LogisticRegression that = (LogisticRegression) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java deleted file mode 100644 index 8a0f50d3a8bb6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface OutputAggregator extends NamedXContentObject { - /** - * @return The name of the output aggregator - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java deleted file mode 100644 index 422dfb0a21fc1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedMode implements OutputAggregator { - - public static final String NAME = "weighted_mode"; - public static final ParseField WEIGHTS = new ParseField("weights"); - public static final ParseField NUM_CLASSES = new ParseField("num_classes"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedMode((Integer) a[0], (List) a[1]) - ); - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUM_CLASSES); - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedMode fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - private final int numClasses; - - public WeightedMode(int numClasses, List weights) { - this.weights = weights; - this.numClasses = numClasses; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.field(NUM_CLASSES.getPreferredName(), numClasses); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedMode that = (WeightedMode) o; - return Objects.equals(weights, that.weights) && numClasses == that.numClasses; - } - - @Override - public int hashCode() { - return Objects.hash(weights, numClasses); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java deleted file mode 100644 index a36c13b138f78..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
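WeightedMode above is the classification counterpart of weighted_sum: it carries one weight per member model plus the number of classes being voted on, and a null weight list is simply omitted from the JSON. A tiny sketch under the pre-removal API:

import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode;

import java.util.List;

public class WeightedModeSketch {
    public static void main(String[] args) {
        // Three member models voting over two classes, equally weighted.
        WeightedMode aggregator = new WeightedMode(2, List.of(1.0, 1.0, 1.0));
        System.out.println(aggregator.getName()); // "weighted_mode"
    }
}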
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedSum implements OutputAggregator { - - public static final String NAME = "weighted_sum"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedSum((List) a[0]) - ); - - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedSum fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public WeightedSum(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedSum that = (WeightedSum) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java deleted file mode 100644 index 89f5625331cd7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Shallow, fully connected, feed forward NN modeled after and ported from https://github.com/google/cld3 - */ -public class LangIdentNeuralNetwork implements TrainedModel { - - public static final String NAME = "lang_ident_neural_network"; - public static final ParseField EMBEDDED_VECTOR_FEATURE_NAME = new ParseField("embedded_vector_feature_name"); - public static final ParseField HIDDEN_LAYER = new ParseField("hidden_layer"); - public static final ParseField SOFTMAX_LAYER = new ParseField("softmax_layer"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LangIdentNeuralNetwork((String) a[0], (LangNetLayer) a[1], (LangNetLayer) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), EMBEDDED_VECTOR_FEATURE_NAME); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, HIDDEN_LAYER); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, SOFTMAX_LAYER); - } - - public static LangIdentNeuralNetwork fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final LangNetLayer hiddenLayer; - private final LangNetLayer softmaxLayer; - private final String embeddedVectorFeatureName; - - LangIdentNeuralNetwork(String embeddedVectorFeatureName, LangNetLayer hiddenLayer, LangNetLayer softmaxLayer) { - this.embeddedVectorFeatureName = embeddedVectorFeatureName; - this.hiddenLayer = hiddenLayer; - this.softmaxLayer = softmaxLayer; - } - - @Override - public List getFeatureNames() { - return Collections.singletonList(embeddedVectorFeatureName); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(EMBEDDED_VECTOR_FEATURE_NAME.getPreferredName(), embeddedVectorFeatureName); - builder.field(HIDDEN_LAYER.getPreferredName(), hiddenLayer); - builder.field(SOFTMAX_LAYER.getPreferredName(), softmaxLayer); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangIdentNeuralNetwork that = (LangIdentNeuralNetwork) o; - return Objects.equals(embeddedVectorFeatureName, that.embeddedVectorFeatureName) - && Objects.equals(hiddenLayer, that.hiddenLayer) - && Objects.equals(softmaxLayer, that.softmaxLayer); - } - - @Override - public int hashCode() { - return Objects.hash(embeddedVectorFeatureName, hiddenLayer, softmaxLayer); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java deleted file mode 100644 index 9737a577725f7..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Represents a single layer in the compressed Lang Net - */ -public class LangNetLayer implements ToXContentObject { - - public static final ParseField NAME = new ParseField("lang_net_layer"); - - private static final ParseField NUM_ROWS = new ParseField("num_rows"); - private static final ParseField NUM_COLS = new ParseField("num_cols"); - private static final ParseField WEIGHTS = new ParseField("weights"); - private static final ParseField BIAS = new ParseField("bias"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new LangNetLayer((List) a[0], (int) a[1], (int) a[2], (List) a[3]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), WEIGHTS); - PARSER.declareInt(constructorArg(), NUM_COLS); - PARSER.declareInt(constructorArg(), NUM_ROWS); - PARSER.declareDoubleArray(constructorArg(), BIAS); - } - - private final double[] weights; - private final int weightRows; - private final int weightCols; - private final double[] bias; - - private LangNetLayer(List weights, int numCols, int numRows, List bias) { - this( - weights.stream().mapToDouble(Double::doubleValue).toArray(), - numCols, - numRows, - bias.stream().mapToDouble(Double::doubleValue).toArray() - ); - } - - LangNetLayer(double[] weights, int numCols, int numRows, double[] bias) { - this.weights = weights; - this.weightCols = numCols; - this.weightRows = numRows; - this.bias = bias; - } - - double[] getWeights() { - return weights; - } - - int getWeightRows() { - return weightRows; - } - - int getWeightCols() { - return weightCols; - } - - double[] getBias() { - return bias; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NUM_COLS.getPreferredName(), weightCols); - builder.field(NUM_ROWS.getPreferredName(), weightRows); - builder.field(WEIGHTS.getPreferredName(), weights); - builder.field(BIAS.getPreferredName(), bias); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangNetLayer that = (LangNetLayer) o; - return Arrays.equals(weights, that.weights) - && Arrays.equals(bias, that.bias) - && Objects.equals(weightCols, that.weightCols) - && Objects.equals(weightRows, that.weightRows); - } - - @Override - public int hashCode() { - return Objects.hash(Arrays.hashCode(weights), 
Arrays.hashCode(bias), weightCols, weightRows); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java deleted file mode 100644 index 7d0b633693e7d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -public class Tree implements TrainedModel { - - public static final String NAME = "tree"; - - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE); - PARSER.declareString(Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Builder::setClassificationLabels, CLASSIFICATION_LABELS); - } - - public static Tree fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List nodes; - private final TargetType targetType; - private final List classificationLabels; - - Tree(List featureNames, List nodes, TargetType targetType, List classificationLabels) { - this.featureNames = featureNames; - this.nodes = nodes; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - public List getNodes() { - return nodes; - } - - @Nullable - public List getClassificationLabels() { - return classificationLabels; - } - - public TargetType getTargetType() { - return targetType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (featureNames != null) { - builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (nodes != null) { - builder.field(TREE_STRUCTURE.getPreferredName(), nodes); - } - if (classificationLabels != null) { - 
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType.toString()); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Tree that = (Tree) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(classificationLabels, that.classificationLabels) - && Objects.equals(targetType, that.targetType) - && Objects.equals(nodes, that.nodes); - } - - @Override - public int hashCode() { - return Objects.hash(featureNames, nodes, targetType, classificationLabels); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames; - private ArrayList nodes; - private int numNodes; - private TargetType targetType; - private List classificationLabels; - - public Builder() { - nodes = new ArrayList<>(); - // allocate space in the root node and set to a leaf - nodes.add(null); - addLeaf(0, 0.0); - numNodes = 1; - } - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder addNode(TreeNode.Builder node) { - nodes.add(node); - return this; - } - - public Builder setNodes(List nodes) { - this.nodes = new ArrayList<>(nodes); - return this; - } - - public Builder setNodes(TreeNode.Builder... nodes) { - return setNodes(Arrays.asList(nodes)); - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - /** - * Add a decision node. Space for the child nodes is allocated - * @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index - * @param featureIndex The feature index the decision is made on - * @param isDefaultLeft Default left branch if the feature is missing - * @param decisionThreshold The decision threshold - * @return The created node - */ - public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { - int leftChild = numNodes++; - int rightChild = numNodes++; - nodes.ensureCapacity(nodeIndex + 1); - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - - TreeNode.Builder node = TreeNode.builder(nodeIndex) - .setDefaultLeft(isDefaultLeft) - .setLeftChild(leftChild) - .setRightChild(rightChild) - .setSplitFeature(featureIndex) - .setThreshold(decisionThreshold); - nodes.set(nodeIndex, node); - - // allocate space for the child nodes - while (nodes.size() <= rightChild) { - nodes.add(null); - } - - return node; - } - - /** - * Sets the node at {@code nodeIndex} to a leaf node. 
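
> Reviewer note: the `addJunction`/`addLeaf` pair above is the heart of the removed `Tree` builder API. A minimal sketch of how the two compose, for anyone migrating off this client (values are illustrative; `addJunction` allocates the two child slots itself, so the leaves land at indices 1 and 2):

```java
import java.util.Collections;

// Builds a single-split tree: node 0 tests feature 0 against 0.5;
// children 1 and 2 (allocated by addJunction) hold the leaf values.
Tree.Builder builder = Tree.builder()
    .setFeatureNames(Collections.singletonList("feature_0"));
builder.addJunction(0, 0, true, 0.5); // root decision node, children 1 and 2
builder.addLeaf(1, 0.0);              // left leaf
builder.addLeaf(2, 1.0);              // right leaf
Tree tree = builder.build();
```
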
- * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} - * @param value The prediction value - * @return this - */ - public Builder addLeaf(int nodeIndex, double value) { - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(Collections.singletonList(value))); - return this; - } - - public Tree build() { - return new Tree( - featureNames, - nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()), - targetType, - classificationLabels - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java deleted file mode 100644 index cb7d9a0f8f211..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.job.config.Operator; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class TreeNode implements ToXContentObject { - - public static final String NAME = "tree_node"; - - public static final ParseField DECISION_TYPE = new ParseField("decision_type"); - public static final ParseField THRESHOLD = new ParseField("threshold"); - public static final ParseField LEFT_CHILD = new ParseField("left_child"); - public static final ParseField RIGHT_CHILD = new ParseField("right_child"); - public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); - public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); - public static final ParseField NODE_INDEX = new ParseField("node_index"); - public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); - public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); - public static final ParseField NUMBER_SAMPLES = new ParseField("number_samples"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - static { - PARSER.declareDouble(Builder::setThreshold, THRESHOLD); - PARSER.declareField(Builder::setOperator, p -> Operator.fromString(p.text()), DECISION_TYPE, ObjectParser.ValueType.STRING); - PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD); - PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD); - PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT); - PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE); - PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX); - PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN); - PARSER.declareDoubleArray(Builder::setLeafValue, LEAF_VALUE); - PARSER.declareLong(Builder::setNumberSamples, 
NUMBER_SAMPLES); - } - - public static Builder fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Operator operator; - private final Double threshold; - private final Integer splitFeature; - private final int nodeIndex; - private final Double splitGain; - private final List leafValue; - private final Boolean defaultLeft; - private final Integer leftChild; - private final Integer rightChild; - private final Long numberSamples; - - TreeNode( - Operator operator, - Double threshold, - Integer splitFeature, - int nodeIndex, - Double splitGain, - List leafValue, - Boolean defaultLeft, - Integer leftChild, - Integer rightChild, - Long numberSamples - ) { - this.operator = operator; - this.threshold = threshold; - this.splitFeature = splitFeature; - this.nodeIndex = nodeIndex; - this.splitGain = splitGain; - this.leafValue = leafValue; - this.defaultLeft = defaultLeft; - this.leftChild = leftChild; - this.rightChild = rightChild; - this.numberSamples = numberSamples; - } - - public Operator getOperator() { - return operator; - } - - public Double getThreshold() { - return threshold; - } - - public Integer getSplitFeature() { - return splitFeature; - } - - public Integer getNodeIndex() { - return nodeIndex; - } - - public Double getSplitGain() { - return splitGain; - } - - public List getLeafValue() { - return leafValue; - } - - public Boolean isDefaultLeft() { - return defaultLeft; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Integer getRightChild() { - return rightChild; - } - - public Long getNumberSamples() { - return numberSamples; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - addOptionalField(builder, DECISION_TYPE, operator); - addOptionalField(builder, THRESHOLD, threshold); - addOptionalField(builder, SPLIT_FEATURE, splitFeature); - addOptionalField(builder, SPLIT_GAIN, splitGain); - addOptionalField(builder, NODE_INDEX, nodeIndex); - addOptionalField(builder, LEAF_VALUE, leafValue); - addOptionalField(builder, DEFAULT_LEFT, defaultLeft); - addOptionalField(builder, LEFT_CHILD, leftChild); - addOptionalField(builder, RIGHT_CHILD, rightChild); - addOptionalField(builder, NUMBER_SAMPLES, numberSamples); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - builder.field(field.getPreferredName(), value); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TreeNode that = (TreeNode) o; - return Objects.equals(operator, that.operator) - && Objects.equals(threshold, that.threshold) - && Objects.equals(splitFeature, that.splitFeature) - && Objects.equals(nodeIndex, that.nodeIndex) - && Objects.equals(splitGain, that.splitGain) - && Objects.equals(leafValue, that.leafValue) - && Objects.equals(defaultLeft, that.defaultLeft) - && Objects.equals(leftChild, that.leftChild) - && Objects.equals(rightChild, that.rightChild) - && Objects.equals(numberSamples, that.numberSamples); - } - - @Override - public int hashCode() { - return Objects.hash( - operator, - threshold, - splitFeature, - splitGain, - nodeIndex, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static Builder builder(int 
nodeIndex) { - return new Builder(nodeIndex); - } - - public static class Builder { - private Operator operator; - private Double threshold; - private Integer splitFeature; - private int nodeIndex; - private Double splitGain; - private List leafValue; - private Boolean defaultLeft; - private Integer leftChild; - private Integer rightChild; - private Long numberSamples; - - public Builder(int nodeIndex) { - this.nodeIndex = nodeIndex; - } - - private Builder() {} - - public Builder setOperator(Operator operator) { - this.operator = operator; - return this; - } - - public Builder setThreshold(Double threshold) { - this.threshold = threshold; - return this; - } - - public Builder setSplitFeature(Integer splitFeature) { - this.splitFeature = splitFeature; - return this; - } - - public Builder setNodeIndex(int nodeIndex) { - this.nodeIndex = nodeIndex; - return this; - } - - public Builder setSplitGain(Double splitGain) { - this.splitGain = splitGain; - return this; - } - - public Builder setLeafValue(List leafValue) { - this.leafValue = leafValue; - return this; - } - - public Builder setDefaultLeft(Boolean defaultLeft) { - this.defaultLeft = defaultLeft; - return this; - } - - public Builder setLeftChild(Integer leftChild) { - this.leftChild = leftChild; - return this; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Builder setRightChild(Integer rightChild) { - this.rightChild = rightChild; - return this; - } - - public Integer getRightChild() { - return rightChild; - } - - public Builder setNumberSamples(Long numberSamples) { - this.numberSamples = numberSamples; - return this; - } - - public TreeNode build() { - return new TreeNode( - operator, - threshold, - splitFeature, - nodeIndex, - splitGain, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java deleted file mode 100644 index 09b8ef16eeda4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; - -/** - * Analysis configuration options that describe which fields are - * analyzed and which functions are used to detect anomalies. - *
<p>
- * The configuration can contain multiple detectors, a new anomaly detector will - * be created for each detector configuration. The fields - * bucketSpan, summaryCountFieldName and categorizationFieldName - * apply to all detectors. - *
<p>
- * If a value has not been set it will be null - * Object wrappers are used around integral types & booleans so they can take - * null values. - */ -public class AnalysisConfig implements ToXContentObject { - /** - * Serialisation names - */ - public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name"); - public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters"); - public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER; - public static final ParseField PER_PARTITION_CATEGORIZATION = new ParseField("per_partition_categorization"); - public static final ParseField LATENCY = new ParseField("latency"); - public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name"); - public static final ParseField DETECTORS = new ParseField("detectors"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields"); - public static final ParseField MODEL_PRUNE_WINDOW = new ParseField("model_prune_window"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ANALYSIS_CONFIG.getPreferredName(), - true, - a -> new AnalysisConfig.Builder((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> (Detector.PARSER).apply(p, c).build(), DETECTORS); - PARSER.declareString( - (builder, val) -> builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), - BUCKET_SPAN - ); - PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); - PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); - // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not - // possible to simply declare whether the field is a string or object and a completely custom parser is required - PARSER.declareField( - Builder::setCategorizationAnalyzerConfig, - (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p), - CATEGORIZATION_ANALYZER, - ObjectParser.ValueType.OBJECT_OR_STRING - ); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - PER_PARTITION_CATEGORIZATION - ); - PARSER.declareString((builder, val) -> builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); - PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); - PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS); - PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow(TimeValue.parseTimeValue(val, MODEL_PRUNE_WINDOW.getPreferredName())), - MODEL_PRUNE_WINDOW - ); - } - - /** - * These values apply to all detectors - */ - private final TimeValue bucketSpan; - private final String categorizationFieldName; - private final List categorizationFilters; - private final CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final TimeValue latency; - 
private final String summaryCountFieldName; - private final List detectors; - private final List influencers; - private final Boolean multivariateByFields; - private final TimeValue modelPruneWindow; - - private AnalysisConfig( - TimeValue bucketSpan, - String categorizationFieldName, - List categorizationFilters, - CategorizationAnalyzerConfig categorizationAnalyzerConfig, - PerPartitionCategorizationConfig perPartitionCategorizationConfig, - TimeValue latency, - String summaryCountFieldName, - List detectors, - List influencers, - Boolean multivariateByFields, - TimeValue modelPruneWindow - ) { - this.detectors = Collections.unmodifiableList(detectors); - this.bucketSpan = bucketSpan; - this.latency = latency; - this.categorizationFieldName = categorizationFieldName; - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters); - this.summaryCountFieldName = summaryCountFieldName; - this.influencers = Collections.unmodifiableList(influencers); - this.multivariateByFields = multivariateByFields; - this.modelPruneWindow = modelPruneWindow; - } - - /** - * The analysis bucket span - * - * @return The bucketspan or null if not set - */ - public TimeValue getBucketSpan() { - return bucketSpan; - } - - public String getCategorizationFieldName() { - return categorizationFieldName; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() { - return categorizationAnalyzerConfig; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - /** - * The latency interval during which out-of-order records should be handled. - * - * @return The latency interval or null if not set - */ - public TimeValue getLatency() { - return latency; - } - - /** - * The name of the field that contains counts for pre-summarised input - * - * @return The field name or null if not set - */ - public String getSummaryCountFieldName() { - return summaryCountFieldName; - } - - /** - * The list of analysis detectors. 
In a valid configuration the list should - * contain at least 1 {@link Detector} - * - * @return The Detectors used in this job - */ - public List getDetectors() { - return detectors; - } - - /** - * The list of influence field names - */ - public List getInfluencers() { - return influencers; - } - - public Boolean getMultivariateByFields() { - return multivariateByFields; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - private static void addIfNotNull(Set fields, String field) { - if (field != null) { - fields.add(field); - } - } - - public List fields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName); - } - - private List collectNonNullAndNonEmptyDetectorFields(Function fieldGetter) { - Set fields = new HashSet<>(); - - for (Detector d : getDetectors()) { - addIfNotNull(fields, fieldGetter.apply(d)); - } - - // remove empty strings - fields.remove(""); - - return new ArrayList<>(fields); - } - - public List byFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName); - } - - public List overFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName); - } - - public List partitionFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (categorizationFieldName != null) { - builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName); - } - if (categorizationFilters != null) { - builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (categorizationAnalyzerConfig != null) { - // This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params); - // because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it - // gets written as a single string. 
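
> Reviewer note: for migration context, a hedged sketch of how an `AnalysisConfig` was typically assembled with these now-removed builders. The `Detector.Builder` used here is the one deleted later in this diff; the field names are illustrative:

```java
import java.util.Collections;
import org.elasticsearch.core.TimeValue;

// One "count by status" detector in a 15-minute bucket, with one influencer.
Detector detector = new Detector.Builder()
    .setFunction("count")
    .setByFieldName("status")
    .build();
AnalysisConfig analysisConfig = AnalysisConfig.builder(Collections.singletonList(detector))
    .setBucketSpan(TimeValue.timeValueMinutes(15))
    .setInfluencers(Collections.singletonList("clientip"))
    .build();
```
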
- categorizationAnalyzerConfig.toXContent(builder, params); - } - if (perPartitionCategorizationConfig != null) { - builder.field(PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (latency != null) { - builder.field(LATENCY.getPreferredName(), latency.getStringRep()); - } - if (summaryCountFieldName != null) { - builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName); - } - builder.startArray(DETECTORS.getPreferredName()); - for (Detector detector : detectors) { - detector.toXContent(builder, params); - } - builder.endArray(); - builder.field(INFLUENCERS.getPreferredName(), influencers); - if (multivariateByFields != null) { - builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields); - } - if (modelPruneWindow != null) { - builder.field(MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - AnalysisConfig that = (AnalysisConfig) object; - return Objects.equals(latency, that.latency) - && Objects.equals(bucketSpan, that.bucketSpan) - && Objects.equals(categorizationFieldName, that.categorizationFieldName) - && Objects.equals(categorizationFilters, that.categorizationFilters) - && Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig) - && Objects.equals(perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(summaryCountFieldName, that.summaryCountFieldName) - && Objects.equals(detectors, that.detectors) - && Objects.equals(influencers, that.influencers) - && Objects.equals(multivariateByFields, that.multivariateByFields) - && Objects.equals(modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - - public static Builder builder(List detectors) { - return new Builder(detectors); - } - - public static class Builder { - - private List detectors; - private TimeValue bucketSpan; - private TimeValue latency; - private String categorizationFieldName; - private List categorizationFilters; - private CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private String summaryCountFieldName; - private List influencers = new ArrayList<>(); - private Boolean multivariateByFields; - private TimeValue modelPruneWindow; - - public Builder(List detectors) { - setDetectors(detectors); - } - - public Builder(AnalysisConfig analysisConfig) { - this.detectors = new ArrayList<>(analysisConfig.detectors); - this.bucketSpan = analysisConfig.bucketSpan; - this.latency = analysisConfig.latency; - this.categorizationFieldName = analysisConfig.categorizationFieldName; - this.categorizationFilters = analysisConfig.categorizationFilters == null - ? 
null - : new ArrayList<>(analysisConfig.categorizationFilters); - this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = analysisConfig.perPartitionCategorizationConfig; - this.summaryCountFieldName = analysisConfig.summaryCountFieldName; - this.influencers = new ArrayList<>(analysisConfig.influencers); - this.multivariateByFields = analysisConfig.multivariateByFields; - this.modelPruneWindow = analysisConfig.modelPruneWindow; - } - - public Builder setDetectors(List detectors) { - Objects.requireNonNull(detectors, "[" + DETECTORS.getPreferredName() + "] must not be null"); - // We always assign sequential IDs to the detectors that are correct for this analysis config - int detectorIndex = 0; - List sequentialIndexDetectors = new ArrayList<>(detectors.size()); - for (Detector origDetector : detectors) { - Detector.Builder builder = new Detector.Builder(origDetector); - builder.setDetectorIndex(detectorIndex++); - sequentialIndexDetectors.add(builder.build()); - } - this.detectors = sequentialIndexDetectors; - return this; - } - - public Builder setDetector(int detectorIndex, Detector detector) { - detectors.set(detectorIndex, detector); - return this; - } - - public Builder setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - return this; - } - - public Builder setLatency(TimeValue latency) { - this.latency = latency; - return this; - } - - public Builder setCategorizationFieldName(String categorizationFieldName) { - this.categorizationFieldName = categorizationFieldName; - return this; - } - - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - public Builder setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - return this; - } - - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - public Builder setSummaryCountFieldName(String summaryCountFieldName) { - this.summaryCountFieldName = summaryCountFieldName; - return this; - } - - public Builder setInfluencers(List influencers) { - this.influencers = Objects.requireNonNull(influencers, INFLUENCERS.getPreferredName()); - return this; - } - - public Builder setMultivariateByFields(Boolean multivariateByFields) { - this.multivariateByFields = multivariateByFields; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public AnalysisConfig build() { - - return new AnalysisConfig( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java deleted file mode 100644 index f4172c843dd39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Analysis limits for autodetect. In particular, - * this is a collection of parameters that allow limiting - * the resources used by the job. - */ -public class AnalysisLimits implements ToXContentObject { - - /** - * Serialisation field names - */ - public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analysis_limits", - true, - a -> new AnalysisLimits((Long) a[0], (Long) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); - } - - /** - * The model memory limit in MiBs. - * It is initialised to null, which implies that the server-side default will be used. - */ - private final Long modelMemoryLimit; - - /** - * It is initialised to null. - * A value of null will result in the server-side default being used. - */ - private final Long categorizationExamplesLimit; - - public AnalysisLimits(Long categorizationExamplesLimit) { - this(null, categorizationExamplesLimit); - } - - public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) { - this.modelMemoryLimit = modelMemoryLimit; - this.categorizationExamplesLimit = categorizationExamplesLimit; - } - - /** - * Maximum size of the model in MB before the anomaly detector - * will drop new samples to prevent the model using any more - * memory. 
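
> Reviewer note: a short sketch of the limits class above in use. Per its Javadoc, both values are optional and `null` defers to the server-side defaults; the numbers here are illustrative:

```java
// 512 MiB model memory cap and at most 4 stored examples per category.
// Serializes as: {"model_memory_limit":"512mb","categorization_examples_limit":4}
AnalysisLimits limits = new AnalysisLimits(512L, 4L);
```
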
- * - * @return The set memory limit or null if not set - */ - @Nullable - public Long getModelMemoryLimit() { - return modelMemoryLimit; - } - - /** - * Gets the limit to the number of examples that are stored per category - * - * @return the limit or null if not set - */ - @Nullable - public Long getCategorizationExamplesLimit() { - return categorizationExamplesLimit; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit + "mb"); - } - if (categorizationExamplesLimit != null) { - builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit); - } - builder.endObject(); - return builder; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof AnalysisLimits == false) { - return false; - } - - AnalysisLimits that = (AnalysisLimits) other; - return Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.categorizationExamplesLimit, that.categorizationExamplesLimit); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryLimit, categorizationExamplesLimit); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java deleted file mode 100644 index 8782bacc83f32..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * Configuration for the categorization analyzer. - * - * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarize, the first option is to specify the name of an out-of-the-box analyzer: - * - * "categorization_analyzer" : "standard" - * - * - * The second option is to specify a custom analyzer by combining the char_filters, tokenizer - * and token_filters fields. In turn, each of these can be specified as the name of an out-of-the-box - * one or as an object defining a custom one. 
For example: - * - * "char_filters" : [ - * "html_strip", - * { "type" : "pattern_replace", "pattern": "SQL: .*" } - * ], - * "tokenizer" : "thai", - * "token_filters" : [ - * "lowercase", - * { "type" : "pattern_replace", "pattern": "^[0-9].*" } - * ] - * - */ -public class CategorizationAnalyzerConfig implements ToXContentFragment { - - public static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer"); - private static final ParseField TOKENIZER = AnalyzeAction.Fields.TOKENIZER; - private static final ParseField TOKEN_FILTERS = AnalyzeAction.Fields.TOKEN_FILTERS; - private static final ParseField CHAR_FILTERS = AnalyzeAction.Fields.CHAR_FILTERS; - - /** - * This method is only used in the unit tests - in production code this config is always parsed as a fragment. - */ - static CategorizationAnalyzerConfig buildFromXContentObject(XContentParser parser) throws IOException { - - if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected start object but got [" + parser.currentToken() + "]"); - } - if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { - throw new IllegalArgumentException("Expected field name but got [" + parser.currentToken() + "]"); - } - parser.nextToken(); - CategorizationAnalyzerConfig categorizationAnalyzerConfig = buildFromXContentFragment(parser); - parser.nextToken(); - return categorizationAnalyzerConfig; - } - - /** - * Parse a categorization_analyzer configuration. A custom parser is needed due to the - * complexity of the format, with many elements able to be specified as either the name of a built-in - * element or an object containing a custom definition. - */ - static CategorizationAnalyzerConfig buildFromXContentFragment(XContentParser parser) throws IOException { - - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - builder.setAnalyzer(parser.text()); - } else if (token != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("[" + CATEGORIZATION_ANALYZER + "] should be analyzer's name or settings [" + token + "]"); - } else { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addCharFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addCharFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain char_filter's name or settings [" - + token - + "]" - ); - } - } - } else if (TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.setTokenizer(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.setTokenizer(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] should be tokenizer's name or settings [" - + token - + "]" - ); - } - } else if 
(TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addTokenFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addTokenFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain token_filter's name or settings [" - + token - + "]" - ); - } - } - } - } - } - - return builder.build(); - } - - /** - * Simple store of either a name of a built-in analyzer element or a custom definition. - */ - public static final class NameOrDefinition implements ToXContentFragment { - - // Exactly one of these two members is not null - public final String name; - public final Settings definition; - - NameOrDefinition(String name) { - this.name = Objects.requireNonNull(name); - this.definition = null; - } - - NameOrDefinition(ParseField field, Map definition) { - this.name = null; - Objects.requireNonNull(definition); - try { - this.definition = Settings.builder().loadFromMap(definition).build(); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to parse [" + definition + "] in [" + field.getPreferredName() + "]", e); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - builder.value(name); - } else { - builder.startObject(); - definition.toXContent(builder, params); - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NameOrDefinition that = (NameOrDefinition) o; - return Objects.equals(name, that.name) && Objects.equals(definition, that.definition); - } - - @Override - public int hashCode() { - return Objects.hash(name, definition); - } - - @Override - public String toString() { - if (definition == null) { - return name; - } else { - return definition.toDelimitedString(';'); - } - } - } - - private final String analyzer; - private final List charFilters; - private final NameOrDefinition tokenizer; - private final List tokenFilters; - - private CategorizationAnalyzerConfig( - String analyzer, - List charFilters, - NameOrDefinition tokenizer, - List tokenFilters - ) { - this.analyzer = analyzer; - this.charFilters = Collections.unmodifiableList(charFilters); - this.tokenizer = tokenizer; - this.tokenFilters = Collections.unmodifiableList(tokenFilters); - } - - public String getAnalyzer() { - return analyzer; - } - - public List getCharFilters() { - return charFilters; - } - - public NameOrDefinition getTokenizer() { - return tokenizer; - } - - public List getTokenFilters() { - return tokenFilters; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (analyzer != null) { - builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), analyzer); - } else { - builder.startObject(CATEGORIZATION_ANALYZER.getPreferredName()); - if (charFilters.isEmpty() == false) { - builder.startArray(CHAR_FILTERS.getPreferredName()); - for (NameOrDefinition charFilter : charFilters) { - charFilter.toXContent(builder, params); - } - builder.endArray(); - } - if (tokenizer != null) { - builder.field(TOKENIZER.getPreferredName(), tokenizer); - } - if 
(tokenFilters.isEmpty() == false) { - builder.startArray(TOKEN_FILTERS.getPreferredName()); - for (NameOrDefinition tokenFilter : tokenFilters) { - tokenFilter.toXContent(builder, params); - } - builder.endArray(); - } - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CategorizationAnalyzerConfig that = (CategorizationAnalyzerConfig) o; - return Objects.equals(analyzer, that.analyzer) - && Objects.equals(charFilters, that.charFilters) - && Objects.equals(tokenizer, that.tokenizer) - && Objects.equals(tokenFilters, that.tokenFilters); - } - - @Override - public int hashCode() { - return Objects.hash(analyzer, charFilters, tokenizer, tokenFilters); - } - - public static class Builder { - - private String analyzer; - private List charFilters = new ArrayList<>(); - private NameOrDefinition tokenizer; - private List tokenFilters = new ArrayList<>(); - - public Builder() {} - - public Builder(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.analyzer = categorizationAnalyzerConfig.analyzer; - this.charFilters = new ArrayList<>(categorizationAnalyzerConfig.charFilters); - this.tokenizer = categorizationAnalyzerConfig.tokenizer; - this.tokenFilters = new ArrayList<>(categorizationAnalyzerConfig.tokenFilters); - } - - public Builder setAnalyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - - public Builder addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public Builder addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(CHAR_FILTERS, charFilter)); - return this; - } - - public Builder setTokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public Builder setTokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(TOKENIZER, tokenizer); - return this; - } - - public Builder addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public Builder addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(TOKEN_FILTERS, tokenFilter)); - return this; - } - - /** - * Create a config - */ - public CategorizationAnalyzerConfig build() { - return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java deleted file mode 100644 index d460cf9bd81a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
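
> Reviewer note: the custom-analyzer example from the class Javadoc above maps onto the removed builder as sketched below. The component names are the illustrative ones from that Javadoc; object-form definitions would go through the `Map` overloads shown above:

```java
CategorizationAnalyzerConfig analyzerConfig = new CategorizationAnalyzerConfig.Builder()
    .addCharFilter("html_strip")   // name form; addCharFilter(Map) takes a full definition
    .setTokenizer("thai")
    .addTokenFilter("lowercase")
    .build();
```
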
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * Describes the format of the data used in the job and how it should - * be interpreted by the ML job. - *
<p>
- * {@link #getTimeField()} is the name of the field containing the timestamp and - * {@link #getTimeFormat()} is the format code for the date string in as described by - * {@link java.time.format.DateTimeFormatter}. - */ -public class DataDescription implements ToXContentObject { - /** - * Enum of the acceptable data formats. - */ - public enum DataFormat { - XCONTENT; - - /** - * Case-insensitive from string method. - * Works with either XCONTENT, XContent, etc. - * - * @param value String representation - * @return The data format - */ - public static DataFormat forString(String value) { - return DataFormat.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("data_description"); - private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("time_field"); - private static final ParseField TIME_FORMAT_FIELD = new ParseField("time_format"); - - /** - * Special time format string for epoch times (seconds) - */ - public static final String EPOCH = "epoch"; - - /** - * Special time format string for epoch times (milli-seconds) - */ - public static final String EPOCH_MS = "epoch_ms"; - - /** - * By default autodetect expects the timestamp in a field with this name - */ - public static final String DEFAULT_TIME_FIELD = "time"; - - private final String timeFieldName; - private final String timeFormat; - - public static final ObjectParser PARSER = new ObjectParser<>( - DATA_DESCRIPTION_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); - } - - public DataDescription(String timeFieldName, String timeFormat) { - this.timeFieldName = timeFieldName; - this.timeFormat = timeFormat; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TIME_FIELD_NAME_FIELD.getPreferredName(), timeFieldName); - builder.field(TIME_FORMAT_FIELD.getPreferredName(), timeFormat); - builder.endObject(); - return builder; - } - - /** - * The format of the data to be processed. - * Always {@link DataDescription.DataFormat#XCONTENT} - * - * @return The data format - */ - public DataFormat getFormat() { - return DataFormat.XCONTENT; - } - - /** - * The name of the field containing the timestamp - * - * @return A String if set or null - */ - public String getTimeField() { - return timeFieldName; - } - - /** - * Either {@value #EPOCH}, {@value #EPOCH_MS} or a SimpleDateTime format string. - * If not set (is null or an empty string) or set to - * {@value #EPOCH_MS} (the default) then the date is assumed to be in - * milliseconds from the epoch. 
- * - * @return A String if set or null - */ - public String getTimeFormat() { - return timeFormat; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataDescription == false) { - return false; - } - - DataDescription that = (DataDescription) other; - - return Objects.equals(this.timeFieldName, that.timeFieldName) && Objects.equals(this.timeFormat, that.timeFormat); - } - - @Override - public int hashCode() { - return Objects.hash(timeFieldName, timeFormat); - } - - public static class Builder { - - private String timeFieldName = DEFAULT_TIME_FIELD; - private String timeFormat = EPOCH_MS; - - public Builder setFormat(DataFormat format) { - Objects.requireNonNull(format); - return this; - } - - public Builder setTimeField(String fieldName) { - timeFieldName = Objects.requireNonNull(fieldName); - return this; - } - - public Builder setTimeFormat(String format) { - timeFormat = Objects.requireNonNull(format); - return this; - } - - public DataDescription build() { - return new DataDescription(timeFieldName, timeFormat); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java deleted file mode 100644 index 66ea72f928d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
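
> Reviewer note: a sketch of the removed `DataDescription` builder, overriding the default `time` field name (the time format default is already `epoch_ms`, so only the field name actually changes here):

```java
// Serializes as: {"time_field":"timestamp","time_format":"epoch_ms"}
DataDescription dataDescription = new DataDescription.Builder()
    .setTimeField("timestamp")
    .setTimeFormat(DataDescription.EPOCH_MS)
    .build();
```
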
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; - -public final class DefaultDetectorDescription { - private static final String BY_TOKEN = " by "; - private static final String OVER_TOKEN = " over "; - - private static final String USE_NULL_OPTION = " usenull="; - private static final String PARTITION_FIELD_OPTION = " partitionfield="; - private static final String EXCLUDE_FREQUENT_OPTION = " excludefrequent="; - - private DefaultDetectorDescription() {} - - /** - * Returns the default description for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @return the default description - */ - public static String of(Detector detector) { - StringBuilder sb = new StringBuilder(); - appendOn(detector, sb); - return sb.toString(); - } - - /** - * Appends to the given {@code StringBuilder} the default description - * for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @param sb the {@code StringBuilder} to append to - */ - public static void appendOn(Detector detector, StringBuilder sb) { - if (isNotNullOrEmpty(detector.getFunction().getFullName())) { - sb.append(detector.getFunction()); - if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append('(').append(quoteField(detector.getFieldName())).append(')'); - } - } else if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append(quoteField(detector.getFieldName())); - } - - if (isNotNullOrEmpty(detector.getByFieldName())) { - sb.append(BY_TOKEN).append(quoteField(detector.getByFieldName())); - } - - if (isNotNullOrEmpty(detector.getOverFieldName())) { - sb.append(OVER_TOKEN).append(quoteField(detector.getOverFieldName())); - } - - if (detector.isUseNull()) { - sb.append(USE_NULL_OPTION).append(detector.isUseNull()); - } - - if (isNotNullOrEmpty(detector.getPartitionFieldName())) { - sb.append(PARTITION_FIELD_OPTION).append(quoteField(detector.getPartitionFieldName())); - } - - if (detector.getExcludeFrequent() != null) { - sb.append(EXCLUDE_FREQUENT_OPTION).append(detector.getExcludeFrequent()); - } - } - - private static String quoteField(String field) { - if (field.matches("\\w*")) { - return field; - } else { - return "\"" + field.replace("\\", "\\\\").replace("\"", "\\\"") + "\""; - } - } - - private static boolean isNotNullOrEmpty(String arg) { - return Strings.isNullOrEmpty(arg) == false; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java deleted file mode 100644 index e23cad0c024aa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
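
> Reviewer note: the description rules above are easiest to see with a concrete case. Because `quoteField` leaves purely word-character fields unquoted, a hypothetical max-by detector renders as shown in the comment:

```java
// DefaultDetectorDescription.of(detector) yields: max(responsetime) by airline
Detector detector = new Detector.Builder()
    .setFunction("max")
    .setFieldName("responsetime")
    .setByFieldName("airline")
    .build();
String description = DefaultDetectorDescription.of(detector);
```
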
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.Objects; - -public class DetectionRule implements ToXContentObject { - - public static final ParseField DETECTION_RULE_FIELD = new ParseField("detection_rule"); - public static final ParseField ACTIONS_FIELD = new ParseField("actions"); - public static final ParseField SCOPE_FIELD = new ParseField("scope"); - public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); - - public static final ObjectParser PARSER = new ObjectParser<>( - DETECTION_RULE_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareStringArray(Builder::setActions, ACTIONS_FIELD); - PARSER.declareObject(Builder::setScope, RuleScope.parser(), SCOPE_FIELD); - PARSER.declareObjectArray(Builder::setConditions, RuleCondition.PARSER, CONDITIONS_FIELD); - } - - private final EnumSet actions; - private final RuleScope scope; - private final List conditions; - - private DetectionRule(EnumSet actions, RuleScope scope, List conditions) { - this.actions = Objects.requireNonNull(actions); - this.scope = Objects.requireNonNull(scope); - this.conditions = Collections.unmodifiableList(conditions); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACTIONS_FIELD.getPreferredName(), actions); - if (scope.isEmpty() == false) { - builder.field(SCOPE_FIELD.getPreferredName(), scope); - } - if (conditions.isEmpty() == false) { - builder.field(CONDITIONS_FIELD.getPreferredName(), conditions); - } - builder.endObject(); - return builder; - } - - public EnumSet getActions() { - return actions; - } - - public RuleScope getScope() { - return scope; - } - - public List getConditions() { - return conditions; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof DetectionRule == false) { - return false; - } - - DetectionRule other = (DetectionRule) obj; - return Objects.equals(actions, other.actions) && Objects.equals(scope, other.scope) && Objects.equals(conditions, other.conditions); - } - - @Override - public int hashCode() { - return Objects.hash(actions, scope, conditions); - } - - public static class Builder { - private EnumSet actions = EnumSet.of(RuleAction.SKIP_RESULT); - private RuleScope scope = new RuleScope(); - private List conditions = Collections.emptyList(); - - public Builder(RuleScope.Builder scope) { - this.scope = scope.build(); - } - - public Builder(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - } - - Builder() {} - - public Builder setActions(List actions) { - this.actions.clear(); - actions.stream().map(RuleAction::fromString).forEach(this.actions::add); - return this; - } - - public Builder setActions(EnumSet actions) { - this.actions = Objects.requireNonNull(actions, ACTIONS_FIELD.getPreferredName()); - return this; - } - - public Builder setActions(RuleAction... 
actions) { - this.actions.clear(); - Arrays.stream(actions).forEach(this.actions::add); - return this; - } - - public Builder setScope(RuleScope scope) { - this.scope = Objects.requireNonNull(scope); - return this; - } - - public Builder setConditions(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - return this; - } - - public DetectionRule build() { - return new DetectionRule(actions, scope, conditions); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java deleted file mode 100644 index f20d67a238008..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -/** - * Defines the fields and functions used in the analysis. A combination of field_name, - * by_field_name and over_field_name can be used depending on the specific - * function chosen. For more information see the - * create anomaly detection - * jobs API and detector functions. - */ -public class Detector implements ToXContentObject { - - public enum ExcludeFrequent { - ALL, - NONE, - BY, - OVER; - - /** - * Case-insensitive from string method. - * Works with either ALL, All, etc. 
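Note (not part of the diff): a sketch of building the DetectionRule removed above. RuleCondition, RuleAction and Operator are sibling HLRC classes deleted elsewhere in this PR; the RuleCondition constructor signature and the 100.0 threshold are assumptions, not taken from this diff.

    import org.elasticsearch.client.ml.job.config.DetectionRule;
    import org.elasticsearch.client.ml.job.config.Operator;
    import org.elasticsearch.client.ml.job.config.RuleAction;
    import org.elasticsearch.client.ml.job.config.RuleCondition;

    import java.util.Collections;

    public class DetectionRuleExample {
        public static void main(String[] args) {
            // Skip results whenever the actual value exceeds a hypothetical threshold.
            RuleCondition condition =
                new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.GT, 100.0);
            DetectionRule rule = new DetectionRule.Builder(Collections.singletonList(condition))
                .setActions(RuleAction.SKIP_RESULT) // also the builder's default action
                .build();
            System.out.println(rule.getActions());
        }
    }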
- * - * @param value String representation - * @return The data format - */ - public static ExcludeFrequent forString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detector_description"); - public static final ParseField FUNCTION_FIELD = new ParseField("function"); - public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name"); - public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("by_field_name"); - public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("over_field_name"); - public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name"); - public static final ParseField USE_NULL_FIELD = new ParseField("use_null"); - public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent"); - public static final ParseField CUSTOM_RULES_FIELD = new ParseField("custom_rules"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - - public static final ObjectParser PARSER = new ObjectParser<>("detector", true, Builder::new); - - static { - PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); - PARSER.declareString(Builder::setFunction, FUNCTION_FIELD); - PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD); - PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); - PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); - PARSER.declareString(Builder::setExcludeFrequent, ExcludeFrequent::forString, EXCLUDE_FREQUENT_FIELD); - PARSER.declareObjectArray(Builder::setRules, (p, c) -> DetectionRule.PARSER.apply(p, c).build(), CUSTOM_RULES_FIELD); - PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); - } - - private final String detectorDescription; - private final DetectorFunction function; - private final String fieldName; - private final String byFieldName; - private final String overFieldName; - private final String partitionFieldName; - private final boolean useNull; - private final ExcludeFrequent excludeFrequent; - private final List rules; - private final int detectorIndex; - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DETECTOR_DESCRIPTION_FIELD.getPreferredName(), detectorDescription); - builder.field(FUNCTION_FIELD.getPreferredName(), function); - if (fieldName != null) { - builder.field(FIELD_NAME_FIELD.getPreferredName(), fieldName); - } - if (byFieldName != null) { - builder.field(BY_FIELD_NAME_FIELD.getPreferredName(), byFieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME_FIELD.getPreferredName(), overFieldName); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName); - } - if (useNull) { - builder.field(USE_NULL_FIELD.getPreferredName(), useNull); - } - if (excludeFrequent != null) { - builder.field(EXCLUDE_FREQUENT_FIELD.getPreferredName(), excludeFrequent); - } - if (rules.isEmpty() == false) { - builder.field(CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - // negative means unknown - if (detectorIndex >= 0) { - 
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - } - builder.endObject(); - return builder; - } - - private Detector( - String detectorDescription, - DetectorFunction function, - String fieldName, - String byFieldName, - String overFieldName, - String partitionFieldName, - boolean useNull, - ExcludeFrequent excludeFrequent, - List rules, - int detectorIndex - ) { - this.function = function; - this.fieldName = fieldName; - this.byFieldName = byFieldName; - this.overFieldName = overFieldName; - this.partitionFieldName = partitionFieldName; - this.useNull = useNull; - this.excludeFrequent = excludeFrequent; - this.rules = Collections.unmodifiableList(rules); - this.detectorDescription = detectorDescription != null ? detectorDescription : DefaultDetectorDescription.of(this); - this.detectorIndex = detectorIndex; - } - - public String getDetectorDescription() { - return detectorDescription; - } - - /** - * The analysis function used e.g. count, rare, min etc. - * - * @return The function or null if not set - */ - public DetectorFunction getFunction() { - return function; - } - - /** - * The Analysis field - * - * @return The field to analyse - */ - public String getFieldName() { - return fieldName; - } - - /** - * The 'by' field or null if not set. - * - * @return The 'by' field - */ - public String getByFieldName() { - return byFieldName; - } - - /** - * The 'over' field or null if not set. - * - * @return The 'over' field - */ - public String getOverFieldName() { - return overFieldName; - } - - /** - * Segments the analysis along another field to have completely - * independent baselines for each instance of partitionfield - * - * @return The Partition Field - */ - public String getPartitionFieldName() { - return partitionFieldName; - } - - /** - * Where there isn't a value for the 'by' or 'over' field should a new - * series be used as the 'null' series. 
- * - * @return true if the 'null' series should be created - */ - public boolean isUseNull() { - return useNull; - } - - /** - * Excludes frequently-occurring metrics from the analysis; - * can apply to 'by' field, 'over' field, or both - * - * @return the value that the user set - */ - public ExcludeFrequent getExcludeFrequent() { - return excludeFrequent; - } - - public List getRules() { - return rules; - } - - /** - * @return the detector index or a negative number if unknown - */ - public int getDetectorIndex() { - return detectorIndex; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof Detector == false) { - return false; - } - - Detector that = (Detector) other; - - return Objects.equals(this.detectorDescription, that.detectorDescription) - && Objects.equals(this.function, that.function) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.useNull, that.useNull) - && Objects.equals(this.excludeFrequent, that.excludeFrequent) - && Objects.equals(this.rules, that.rules) - && this.detectorIndex == that.detectorIndex; - } - - @Override - public int hashCode() { - return Objects.hash( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private String detectorDescription; - private DetectorFunction function; - private String fieldName; - private String byFieldName; - private String overFieldName; - private String partitionFieldName; - private boolean useNull = false; - private ExcludeFrequent excludeFrequent; - private List rules = Collections.emptyList(); - // negative means unknown - private int detectorIndex = -1; - - public Builder() {} - - public Builder(Detector detector) { - detectorDescription = detector.detectorDescription; - function = detector.function; - fieldName = detector.fieldName; - byFieldName = detector.byFieldName; - overFieldName = detector.overFieldName; - partitionFieldName = detector.partitionFieldName; - useNull = detector.useNull; - excludeFrequent = detector.excludeFrequent; - rules = new ArrayList<>(detector.rules); - detectorIndex = detector.detectorIndex; - } - - public Builder(String function, String fieldName) { - this(DetectorFunction.fromString(function), fieldName); - } - - public Builder(DetectorFunction function, String fieldName) { - this.function = function; - this.fieldName = fieldName; - } - - public Builder setDetectorDescription(String detectorDescription) { - this.detectorDescription = detectorDescription; - return this; - } - - public Builder setFunction(String function) { - this.function = DetectorFunction.fromString(function); - return this; - } - - public Builder setFieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public Builder setByFieldName(String byFieldName) { - this.byFieldName = byFieldName; - return this; - } - - public Builder setOverFieldName(String overFieldName) { - this.overFieldName = overFieldName; - return this; - } - - public Builder setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - return this; - } - - public Builder setUseNull(boolean useNull) { - 
this.useNull = useNull; - return this; - } - - public Builder setExcludeFrequent(ExcludeFrequent excludeFrequent) { - this.excludeFrequent = excludeFrequent; - return this; - } - - public Builder setRules(List rules) { - this.rules = rules; - return this; - } - - public Builder setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - return this; - } - - public Detector build() { - return new Detector( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java deleted file mode 100644 index c33ffffd34f1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Arrays; -import java.util.Collections; -import java.util.Locale; -import java.util.Set; -import java.util.stream.Collectors; - -public enum DetectorFunction { - - COUNT, - LOW_COUNT, - HIGH_COUNT, - NON_ZERO_COUNT("nzc"), - LOW_NON_ZERO_COUNT("low_nzc"), - HIGH_NON_ZERO_COUNT("high_nzc"), - DISTINCT_COUNT("dc"), - LOW_DISTINCT_COUNT("low_dc"), - HIGH_DISTINCT_COUNT("high_dc"), - RARE, - FREQ_RARE, - INFO_CONTENT, - LOW_INFO_CONTENT, - HIGH_INFO_CONTENT, - METRIC, - MEAN, - LOW_MEAN, - HIGH_MEAN, - AVG, - LOW_AVG, - HIGH_AVG, - MEDIAN, - LOW_MEDIAN, - HIGH_MEDIAN, - MIN, - MAX, - SUM, - LOW_SUM, - HIGH_SUM, - NON_NULL_SUM, - LOW_NON_NULL_SUM, - HIGH_NON_NULL_SUM, - VARP, - LOW_VARP, - HIGH_VARP, - TIME_OF_DAY, - TIME_OF_WEEK, - LAT_LONG; - - private Set shortcuts; - - DetectorFunction() { - shortcuts = Collections.emptySet(); - } - - DetectorFunction(String... shortcuts) { - this.shortcuts = Arrays.stream(shortcuts).collect(Collectors.toSet()); - } - - public String getFullName() { - return name().toLowerCase(Locale.ROOT); - } - - @Override - public String toString() { - return getFullName(); - } - - public static DetectorFunction fromString(String op) { - for (DetectorFunction function : values()) { - if (function.getFullName().equals(op) || function.shortcuts.contains(op)) { - return function; - } - } - throw new IllegalArgumentException("Unknown detector function [" + op + "]"); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java deleted file mode 100644 index e9c0fbece98c3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
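Note (not part of the diff): a sketch exercising the Detector and DetectorFunction classes removed above, using only methods visible in this diff; the by/partition field names are hypothetical.

    import org.elasticsearch.client.ml.job.config.Detector;
    import org.elasticsearch.client.ml.job.config.DetectorFunction;

    public class DetectorExample {
        public static void main(String[] args) {
            // Shortcut names resolve through fromString(): "nzc" maps to NON_ZERO_COUNT.
            DetectorFunction function = DetectorFunction.fromString("nzc");
            System.out.println(function); // non_zero_count

            // high_count takes no field_name; by/partition fields are hypothetical.
            Detector detector = Detector.builder()
                .setFunction("high_count")
                .setByFieldName("status_code")
                .setPartitionFieldName("host")
                .build();

            // With no explicit description, DefaultDetectorDescription supplies one:
            // high_count by status_code partitionfield=host
            System.out.println(detector.getDetectorDescription());
        }
    }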
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class FilterRef implements ToXContentObject { - - public static final ParseField FILTER_REF_FIELD = new ParseField("filter_ref"); - public static final ParseField FILTER_ID = new ParseField("filter_id"); - public static final ParseField FILTER_TYPE = new ParseField("filter_type"); - - public enum FilterType { - INCLUDE, - EXCLUDE; - - public static FilterType fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - FILTER_REF_FIELD.getPreferredName(), - true, - a -> new FilterRef((String) a[0], (FilterType) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FILTER_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FilterType::fromString, FILTER_TYPE); - } - - private final String filterId; - private final FilterType filterType; - - public FilterRef(String filterId, FilterType filterType) { - this.filterId = Objects.requireNonNull(filterId); - this.filterType = filterType == null ? FilterType.INCLUDE : filterType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FILTER_ID.getPreferredName(), filterId); - builder.field(FILTER_TYPE.getPreferredName(), filterType); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof FilterRef == false) { - return false; - } - - FilterRef other = (FilterRef) obj; - return Objects.equals(filterId, other.filterId) && Objects.equals(filterType, other.filterType); - } - - @Override - public int hashCode() { - return Objects.hash(filterId, filterType); - } - - public String getFilterId() { - return filterId; - } - - public FilterType getFilterType() { - return filterType; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java deleted file mode 100644 index cfea39be07735..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ /dev/null @@ -1,627 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
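Note (not part of the diff): a sketch of the FilterRef class removed above, using only its constructor and getters as shown in this diff; the filter ids are hypothetical.

    import org.elasticsearch.client.ml.job.config.FilterRef;

    public class FilterRefExample {
        public static void main(String[] args) {
            // A null filter_type defaults to INCLUDE in the constructor.
            FilterRef included = new FilterRef("safe_domains", null);
            FilterRef excluded = new FilterRef("noisy_hosts", FilterRef.FilterType.EXCLUDE);
            System.out.println(included.getFilterType()); // include
            System.out.println(excluded.getFilterType()); // exclude
        }
    }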
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * This class represents a configured and created Job. The creation time is set - * to the time the object was constructed and the finished time and last - * data time fields are {@code null} until the job has seen some data or it is - * finished respectively. - */ -public class Job implements ToXContentObject { - - public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector"; - - /* - * Field names used in serialization - */ - public static final ParseField ID = new ParseField("job_id"); - public static final ParseField JOB_TYPE = new ParseField("job_type"); - public static final ParseField GROUPS = new ParseField("groups"); - public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG; - public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings"); - public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField FINISHED_TIME = new ParseField("finished_time"); - public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); - public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); - public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval"); - public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days"); - public static final ParseField DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = new ParseField("daily_model_snapshot_retention_after_days"); - public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); - public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); - public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETING = new ParseField("deleting"); - public static final ParseField ALLOW_LAZY_OPEN = new ParseField("allow_lazy_open"); - - public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setJobType, JOB_TYPE); - PARSER.declareStringArray(Builder::setGroups, GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareField( - Builder::setCreateTime, - (p) -> TimeUtil.parseTimeField(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setFinishedTime, - (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()), - FINISHED_TIME, - 
ValueType.VALUE - ); - PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); - PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG); - PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT); - PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - PARSER.declareBoolean(Builder::setDeleting, DELETING); - PARSER.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN); - } - - private final String jobId; - private final String jobType; - - private final List groups; - private final String description; - private final Date createTime; - private final Date finishedTime; - private final AnalysisConfig analysisConfig; - private final AnalysisLimits analysisLimits; - private final DataDescription dataDescription; - private final ModelPlotConfig modelPlotConfig; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final Map customSettings; - private final String modelSnapshotId; - private final String resultsIndexName; - private final Boolean deleting; - private final Boolean allowLazyOpen; - - private Job( - String jobId, - String jobType, - List groups, - String description, - Date createTime, - Date finishedTime, - AnalysisConfig analysisConfig, - AnalysisLimits analysisLimits, - DataDescription dataDescription, - ModelPlotConfig modelPlotConfig, - Long renormalizationWindowDays, - TimeValue backgroundPersistInterval, - Long modelSnapshotRetentionDays, - Long dailyModelSnapshotRetentionAfterDays, - Long resultsRetentionDays, - Map customSettings, - String modelSnapshotId, - String resultsIndexName, - Boolean deleting, - Boolean allowLazyOpen - ) { - - this.jobId = jobId; - this.jobType = jobType; - this.groups = Collections.unmodifiableList(groups); - this.description = description; - this.createTime = createTime; - this.finishedTime = finishedTime; - this.analysisConfig = analysisConfig; - this.analysisLimits = analysisLimits; - this.dataDescription = dataDescription; - this.modelPlotConfig = modelPlotConfig; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.customSettings = customSettings == null ? 
null : Collections.unmodifiableMap(customSettings); - this.modelSnapshotId = modelSnapshotId; - this.resultsIndexName = resultsIndexName; - this.deleting = deleting; - this.allowLazyOpen = allowLazyOpen; - } - - /** - * Return the Job Id. - * - * @return The job Id string - */ - public String getId() { - return jobId; - } - - public String getJobType() { - return jobType; - } - - public List getGroups() { - return groups; - } - - /** - * Private version of getResultsIndexName so that a job can be built from another - * job and pass index name validation - * - * @return The job's index name, minus prefix - */ - private String getResultsIndexNameNoPrefix() { - return resultsIndexName; - } - - /** - * The job description - * - * @return job description - */ - public String getDescription() { - return description; - } - - /** - * The Job creation time. This name is preferred when serialising to the - * REST API. - * - * @return The date the job was created - */ - public Date getCreateTime() { - return createTime; - } - - /** - * The time the job was finished or null if not finished. - * - * @return The date the job was last retired or null - */ - public Date getFinishedTime() { - return finishedTime; - } - - /** - * The analysis configuration object - * - * @return The AnalysisConfig - */ - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - /** - * The analysis options object - * - * @return The AnalysisLimits - */ - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - /** - * If not set the input data is assumed to be csv with a '_time' field in - * epoch format. - * - * @return A DataDescription or null - * @see DataDescription - */ - public DataDescription getDataDescription() { - return dataDescription; - } - - /** - * The duration of the renormalization window in days - * - * @return renormalization window in days - */ - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - /** - * The background persistence interval - * - * @return background persistence interval - */ - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getDailyModelSnapshotRetentionAfterDays() { - return dailyModelSnapshotRetentionAfterDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public Map getCustomSettings() { - return customSettings; - } - - public String getModelSnapshotId() { - return modelSnapshotId; - } - - public Boolean getDeleting() { - return deleting; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - final String humanReadableSuffix = "_string"; - - builder.field(ID.getPreferredName(), jobId); - builder.field(JOB_TYPE.getPreferredName(), jobType); - - if (groups.isEmpty() == false) { - builder.field(GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime()); - } - if (finishedTime != null) { - builder.timeField( - FINISHED_TIME.getPreferredName(), - 
FINISHED_TIME.getPreferredName() + humanReadableSuffix, - finishedTime.getTime() - ); - } - builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params); - if (analysisLimits != null) { - builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params); - } - if (dataDescription != null) { - builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params); - } - if (modelPlotConfig != null) { - builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params); - } - if (renormalizationWindowDays != null) { - builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep()); - } - if (modelSnapshotRetentionDays != null) { - builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (customSettings != null) { - builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (modelSnapshotId != null) { - builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId); - } - if (resultsIndexName != null) { - builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); - } - if (deleting != null) { - builder.field(DELETING.getPreferredName(), deleting); - } - if (allowLazyOpen != null) { - builder.field(ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Job that = (Job) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.jobType, that.jobType) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.createTime, that.createTime) - && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.analysisConfig, that.analysisConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.dataDescription, that.dataDescription) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleting, that.deleting) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, 
- backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private String jobType = ANOMALY_DETECTOR_JOB_TYPE; - private List groups = Collections.emptyList(); - private String description; - private AnalysisConfig analysisConfig; - private AnalysisLimits analysisLimits; - private DataDescription dataDescription; - private Date createTime; - private Date finishedTime; - private ModelPlotConfig modelPlotConfig; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private Map customSettings; - private String modelSnapshotId; - private String resultsIndexName; - private Boolean deleting; - private Boolean allowLazyOpen; - - private Builder() {} - - public Builder(String id) { - this.id = id; - } - - public Builder(Job job) { - this.id = job.getId(); - this.jobType = job.getJobType(); - this.groups = new ArrayList<>(job.getGroups()); - this.description = job.getDescription(); - this.analysisConfig = job.getAnalysisConfig(); - this.analysisLimits = job.getAnalysisLimits(); - this.dataDescription = job.getDataDescription(); - this.createTime = job.getCreateTime(); - this.finishedTime = job.getFinishedTime(); - this.modelPlotConfig = job.getModelPlotConfig(); - this.renormalizationWindowDays = job.getRenormalizationWindowDays(); - this.backgroundPersistInterval = job.getBackgroundPersistInterval(); - this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays(); - this.dailyModelSnapshotRetentionAfterDays = job.getDailyModelSnapshotRetentionAfterDays(); - this.resultsRetentionDays = job.getResultsRetentionDays(); - this.customSettings = job.getCustomSettings() == null ? null : new LinkedHashMap<>(job.getCustomSettings()); - this.modelSnapshotId = job.getModelSnapshotId(); - this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleting = job.getDeleting(); - this.allowLazyOpen = job.getAllowLazyOpen(); - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public String getId() { - return id; - } - - public Builder setJobType(String jobType) { - this.jobType = jobType; - return this; - } - - public Builder setGroups(List groups) { - this.groups = groups == null ? 
Collections.emptyList() : groups; - return this; - } - - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { - analysisConfig = Objects.requireNonNull(configBuilder, ANALYSIS_CONFIG.getPreferredName()).build(); - return this; - } - - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = Objects.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName()); - return this; - } - - Builder setCreateTime(Date createTime) { - this.createTime = createTime; - return this; - } - - Builder setFinishedTime(Date finishedTime) { - this.finishedTime = finishedTime; - return this; - } - - public Builder setDataDescription(DataDescription.Builder descriptionBuilder) { - dataDescription = Objects.requireNonNull(descriptionBuilder, DATA_DESCRIPTION.getPreferredName()).build(); - return this; - } - - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - public Builder setModelSnapshotId(String modelSnapshotId) { - this.modelSnapshotId = modelSnapshotId; - return this; - } - - public Builder setResultsIndexName(String resultsIndexName) { - this.resultsIndexName = resultsIndexName; - return this; - } - - Builder setDeleting(Boolean deleting) { - this.deleting = deleting; - return this; - } - - Builder setAllowLazyOpen(Boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - /** - * Builds a job. 
- * - * @return The job - */ - public Job build() { - Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null"); - Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null"); - return new Job( - id, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java deleted file mode 100644 index 1a248ef137d53..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -/** - * Jobs whether running or complete are in one of these states. - * When a job is created it is initialised in the state closed - * i.e. it is not running. - */ -public enum JobState { - - CLOSING, - CLOSED, - OPENED, - FAILED, - OPENING; - - public static JobState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java deleted file mode 100644 index f0d70a2509a39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java +++ /dev/null @@ -1,588 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
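Note (not part of the diff): a sketch of assembling the Job removed above, plus the JobState enum. The AnalysisConfig.Builder(List) constructor, setBucketSpan(), and DataDescription.Builder().setTimeField() are sibling HLRC APIs assumed here, not shown in this diff; the job id and time field are hypothetical.

    import org.elasticsearch.client.ml.job.config.AnalysisConfig;
    import org.elasticsearch.client.ml.job.config.DataDescription;
    import org.elasticsearch.client.ml.job.config.Detector;
    import org.elasticsearch.client.ml.job.config.Job;
    import org.elasticsearch.client.ml.job.config.JobState;
    import org.elasticsearch.core.TimeValue;

    import java.util.Collections;

    public class JobExample {
        public static void main(String[] args) {
            Detector detector = new Detector.Builder("count", null).build();
            AnalysisConfig.Builder analysisConfig =
                new AnalysisConfig.Builder(Collections.singletonList(detector))
                    .setBucketSpan(TimeValue.timeValueMinutes(15)); // assumed sibling API
            DataDescription.Builder dataDescription =
                new DataDescription.Builder().setTimeField("timestamp"); // assumed sibling API

            Job job = Job.builder("event-rate-job") // hypothetical job id
                .setDescription("Hypothetical event-rate job")
                .setAnalysisConfig(analysisConfig)
                .setDataDescription(dataDescription)
                .build();

            // A newly created job starts in the closed state.
            System.out.println(job.getId() + " -> " + JobState.fromString("closed"));
        }
    }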
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * POJO for updating an existing Machine Learning {@link Job} - */ -public class JobUpdate implements ToXContentObject { - public static final ParseField DETECTORS = new ParseField("detectors"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_update", - true, - args -> new Builder((String) args[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); - PARSER.declareStringArray(Builder::setGroups, Job.GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); - PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - Job.BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); - PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - AnalysisConfig.PER_PARTITION_CATEGORIZATION - ); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); - PARSER.declareBoolean(Builder::setAllowLazyOpen, Job.ALLOW_LAZY_OPEN); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow( - TimeValue.parseTimeValue(val, AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName()) - ), - AnalysisConfig.MODEL_PRUNE_WINDOW - ); - } - - private final String jobId; - private final List groups; - private final String description; - private final List detectorUpdates; - private final ModelPlotConfig modelPlotConfig; - private final AnalysisLimits analysisLimits; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final List categorizationFilters; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final Map customSettings; - private final Boolean allowLazyOpen; - private final TimeValue modelPruneWindow; - - private JobUpdate( - String jobId, - @Nullable List groups, - @Nullable String description, - @Nullable List detectorUpdates, - @Nullable ModelPlotConfig 
modelPlotConfig, - @Nullable AnalysisLimits analysisLimits, - @Nullable TimeValue backgroundPersistInterval, - @Nullable Long renormalizationWindowDays, - @Nullable Long resultsRetentionDays, - @Nullable Long modelSnapshotRetentionDays, - @Nullable Long dailyModelSnapshotRetentionAfterDays, - @Nullable List categorizationFilters, - @Nullable PerPartitionCategorizationConfig perPartitionCategorizationConfig, - @Nullable Map customSettings, - @Nullable Boolean allowLazyOpen, - @Nullable TimeValue modelPruneWindow - ) { - this.jobId = jobId; - this.groups = groups; - this.description = description; - this.detectorUpdates = detectorUpdates; - this.modelPlotConfig = modelPlotConfig; - this.analysisLimits = analysisLimits; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.categorizationFilters = categorizationFilters; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.customSettings = customSettings; - this.allowLazyOpen = allowLazyOpen; - this.modelPruneWindow = modelPruneWindow; - } - - public String getJobId() { - return jobId; - } - - public List getGroups() { - return groups; - } - - public String getDescription() { - return description; - } - - public List getDetectorUpdates() { - return detectorUpdates; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - public Map getCustomSettings() { - return customSettings; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (groups != null) { - builder.field(Job.GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (detectorUpdates != null) { - builder.field(DETECTORS.getPreferredName(), detectorUpdates); - } - if (modelPlotConfig != null) { - builder.field(Job.MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig); - } - if (analysisLimits != null) { - builder.field(Job.ANALYSIS_LIMITS.getPreferredName(), analysisLimits); - } - if (renormalizationWindowDays != null) { - builder.field(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval); - } - if (modelSnapshotRetentionDays != null) { - 
builder.field(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(Job.RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (categorizationFilters != null) { - builder.field(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (perPartitionCategorizationConfig != null) { - builder.field(AnalysisConfig.PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (customSettings != null) { - builder.field(Job.CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (allowLazyOpen != null) { - builder.field(Job.ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - if (modelPruneWindow != null) { - builder.field(AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - JobUpdate that = (JobUpdate) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.detectorUpdates, that.detectorUpdates) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.categorizationFilters, that.categorizationFilters) - && Objects.equals(this.perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen) - && Objects.equals(this.modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - - public static class DetectorUpdate implements ToXContentObject { - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "detector_update", - true, - a -> new DetectorUpdate((int) a[0], (String) a[1], (List) a[2]) - ); - - static { - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); - PARSER.declareObjectArray( - ConstructingObjectParser.optionalConstructorArg(), - (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, 
 parseFieldMatcher).build(), - Detector.CUSTOM_RULES_FIELD - ); - } - - private final int detectorIndex; - private final String description; - private final List rules; - - /** - * A detector update to apply to the Machine Learning Job - * - * @param detectorIndex The identifier of the detector to update. - * @param description The new description for the detector. - * @param rules The new list of rules for the detector. - */ - public DetectorUpdate(int detectorIndex, String description, List rules) { - this.detectorIndex = detectorIndex; - this.description = description; - this.rules = rules; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - public String getDescription() { - return description; - } - - public List getRules() { - return rules; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex); - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (rules != null) { - builder.field(Detector.CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - builder.endObject(); - - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(detectorIndex, description, rules); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DetectorUpdate that = (DetectorUpdate) other; - return this.detectorIndex == that.detectorIndex - && Objects.equals(this.description, that.description) - && Objects.equals(this.rules, that.rules); - } - } - - public static class Builder { - - private final String jobId; - private List groups; - private String description; - private List detectorUpdates; - private ModelPlotConfig modelPlotConfig; - private AnalysisLimits analysisLimits; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private List categorizationFilters; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private Map customSettings; - private Boolean allowLazyOpen; - private TimeValue modelPruneWindow; - - /** - * New {@link JobUpdate.Builder} object for the existing job - * - * @param jobId non-null `jobId` for referencing an existing {@link Job} - */ - public Builder(String jobId) { - this.jobId = jobId; - } - - /** - * Set the job groups - * - * Updates the {@link Job#groups} setting - * - * @param groups A list of group names - */ - public Builder setGroups(List groups) { - this.groups = groups; - return this; - } - - /** - * Set the job description - * - * Updates the {@link Job#description} setting - * - * @param description the desired Machine Learning job description - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - /** - * The detector updates to apply to the job - * - * Updates the {@link AnalysisConfig#detectors} setting - * - * @param detectorUpdates list of {@link JobUpdate.DetectorUpdate} objects - */ - public Builder setDetectorUpdates(List detectorUpdates) { - this.detectorUpdates = detectorUpdates; - return this; - } - - /** - * Enables/disables the model plot config setting through {@link ModelPlotConfig#enabled} - * - * Updates the {@link 
Job#modelPlotConfig} setting - * - * @param modelPlotConfig {@link ModelPlotConfig} object with updated fields - */ - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - /** - * Sets new {@link AnalysisLimits} for the {@link Job} - * - * Updates the {@link Job#analysisLimits} setting - * - * @param analysisLimits Updates to {@link AnalysisLimits} - */ - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = analysisLimits; - return this; - } - - /** - * Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen - * - * Updates the {@link Job#renormalizationWindowDays} setting - * - * @param renormalizationWindowDays number of renormalization window days - */ - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - /** - * Advanced configuration option. The time between each periodic persistence of the model - * - * Updates the {@link Job#backgroundPersistInterval} setting - * - * @param backgroundPersistInterval the time between background persistence - */ - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - /** - * The time in days that model snapshots are retained for the job. - * - * Updates the {@link Job#modelSnapshotRetentionDays} setting - * - * @param modelSnapshotRetentionDays number of days to keep a model snapshot - */ - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - /** - * The time in days after which only one model snapshot per day is retained for the job. - * - * Updates the {@link Job#dailyModelSnapshotRetentionAfterDays} setting - * - * @param dailyModelSnapshotRetentionAfterDays number of days to keep a model snapshot - */ - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - /** - * Advanced configuration option. The number of days for which job results are retained - * - * Updates the {@link Job#resultsRetentionDays} setting - * - * @param resultsRetentionDays number of days to keep results. - */ - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - /** - * Sets the categorization filters on the {@link Job} - * - * Updates the {@link AnalysisConfig#categorizationFilters} setting. - * Requires {@link AnalysisConfig#categorizationFieldName} to have been set on the existing Job. - * - * @param categorizationFilters list of categorization filters for the Job's {@link AnalysisConfig} - */ - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - /** - * Sets the per-partition categorization options on the {@link Job} - * - * Updates the {@link AnalysisConfig#perPartitionCategorizationConfig} setting. - * Requires {@link AnalysisConfig#perPartitionCategorizationConfig} to have been set on the existing Job. 
- * - * @param perPartitionCategorizationConfig per-partition categorization options for the Job's {@link AnalysisConfig} - */ - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - /** - * Contains custom meta data about the job. - * - * Updates the {@link Job#customSettings} setting - * - * @param customSettings custom settings map for the job - */ - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setAllowLazyOpen(boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public JobUpdate build() { - return new JobUpdate( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - backgroundPersistInterval, - renormalizationWindowDays, - resultsRetentionDays, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java deleted file mode 100644 index 7079ff69ea106..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * An MlFilter Object - * - * A filter contains a list of strings. - * It can be used by one or more jobs. - * - * Specifically, filters are referenced in the custom_rules property of detector configuration objects. 
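A short usage sketch: the filter id, description and items are invented, and the parse round-trip reuses the same lenient-parser pattern that RuleScope.parser() below relies on (assume an enclosing method that declares throws IOException):

    import org.elasticsearch.xcontent.DeprecationHandler;
    import org.elasticsearch.xcontent.NamedXContentRegistry;
    import org.elasticsearch.xcontent.XContentFactory;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentType;

    // Build a filter whose items may carry a leading or trailing wildcard
    MlFilter filter = MlFilter.builder("safe_domains")
        .setDescription("Domains that should never be flagged")
        .setItems("*.elastic.co", "internal.example.com")
        .build();

    // Parse the JSON form back into an MlFilter via the lenient PARSER
    String json = "{\"filter_id\":\"safe_domains\",\"items\":[\"*.elastic.co\"]}";
    try (XContentParser p = XContentFactory.xContent(XContentType.JSON)
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, json)) {
        MlFilter parsed = MlFilter.PARSER.parse(p, null).build();
    }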
- */ -public class MlFilter implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("type"); - public static final ParseField ID = new ParseField("filter_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField ITEMS = new ParseField("items"); - - // For QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("filters"); - - public static final ObjectParser PARSER = new ObjectParser<>(TYPE.getPreferredName(), true, Builder::new); - - static { - PARSER.declareString((builder, s) -> {}, TYPE); - PARSER.declareString(Builder::setId, ID); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareStringArray(Builder::setItems, ITEMS); - } - - private final String id; - private final String description; - private final SortedSet items; - - private MlFilter(String id, String description, SortedSet items) { - this.id = Objects.requireNonNull(id); - this.description = description; - this.items = Collections.unmodifiableSortedSet(items); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.field(ITEMS.getPreferredName(), items); - // Don't include TYPE as it's fixed - builder.endObject(); - return builder; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public SortedSet getItems() { - return items; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj instanceof MlFilter == false) { - return false; - } - - MlFilter other = (MlFilter) obj; - return id.equals(other.id) && Objects.equals(description, other.description) && items.equals(other.items); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, items); - } - - /** - * Creates a new Builder object for creating an MlFilter object - * @param filterId The ID of the filter to create - */ - public static Builder builder(String filterId) { - return new Builder().setId(filterId); - } - - public static class Builder { - - private String id; - private String description; - private SortedSet items = new TreeSet<>(); - - private Builder() {} - - /** - * Set the ID of the filter - * @param id The id desired - */ - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - @Nullable - public String getId() { - return id; - } - - /** - * Set the description of the filter - * @param description The description desired - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setItems(SortedSet items) { - this.items = Objects.requireNonNull(items); - return this; - } - - public Builder setItems(List items) { - this.items = new TreeSet<>(items); - return this; - } - - /** - * The items of the filter. - * - * A wildcard * can be used at the beginning or the end of an item. Up to 10000 items are allowed in each filter. - * - * @param items String list of items to be applied in the filter - */ - public Builder setItems(String... 
items) { - setItems(Arrays.asList(items)); - return this; - } - - public MlFilter build() { - return new MlFilter(id, description, items); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java deleted file mode 100644 index 4581409d64989..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class ModelPlotConfig implements ToXContentObject { - - private static final ParseField TYPE_FIELD = new ParseField("model_plot_config"); - private static final ParseField ENABLED_FIELD = new ParseField("enabled"); - private static final ParseField TERMS_FIELD = new ParseField("terms"); - private static final ParseField ANNOTATIONS_ENABLED_FIELD = new ParseField("annotations_enabled"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new ModelPlotConfig((boolean) a[0], (String) a[1], (Boolean) a[2]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ANNOTATIONS_ENABLED_FIELD); - } - - private final boolean enabled; - private final String terms; - private final Boolean annotationsEnabled; - - public ModelPlotConfig(boolean enabled, String terms, Boolean annotationsEnabled) { - this.enabled = enabled; - this.terms = terms; - this.annotationsEnabled = annotationsEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (terms != null) { - builder.field(TERMS_FIELD.getPreferredName(), terms); - } - if (annotationsEnabled != null) { - builder.field(ANNOTATIONS_ENABLED_FIELD.getPreferredName(), annotationsEnabled); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public String getTerms() { - return this.terms; - } - - public Boolean annotationsEnabled() { - return annotationsEnabled; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof ModelPlotConfig == false) { - return false; - } - - ModelPlotConfig that = (ModelPlotConfig) other; - return this.enabled == that.enabled - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.annotationsEnabled, that.annotationsEnabled); - } - - @Override - public int hashCode() { - return Objects.hash(enabled, terms, annotationsEnabled); - } -} diff 
--git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java deleted file mode 100644 index 3d7ac2af70a66..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -/** - * Enum representing logical comparisons on doubles - */ -public enum Operator { - GT { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) > 0; - } - }, - GTE { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) >= 0; - } - }, - LT { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) < 0; - } - }, - LTE { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) <= 0; - } - }; - // EQ was considered but given the oddity of such a - // condition and the fact that it would be a numerically - // unstable condition, it was rejected. - - public static final ParseField OPERATOR_FIELD = new ParseField("operator"); - - public boolean test(double lhs, double rhs) { - return false; - } - - public static Operator fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java deleted file mode 100644 index 86cd40f45c601..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PerPartitionCategorizationConfig implements ToXContentObject { - - public static final ParseField TYPE_FIELD = new ParseField("per_partition_categorization"); - public static final ParseField ENABLED_FIELD = new ParseField("enabled"); - public static final ParseField STOP_ON_WARN = new ParseField("stop_on_warn"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new PerPartitionCategorizationConfig((boolean) a[0], (Boolean) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), STOP_ON_WARN); - } - - private final boolean enabled; - private final boolean stopOnWarn; - - public PerPartitionCategorizationConfig() { - this(false, null); - } - - public PerPartitionCategorizationConfig(boolean enabled, Boolean stopOnWarn) { - this.enabled = enabled; - this.stopOnWarn = (stopOnWarn == null) ? false : stopOnWarn; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (enabled) { - builder.field(STOP_ON_WARN.getPreferredName(), stopOnWarn); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public boolean isStopOnWarn() { - return stopOnWarn; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof PerPartitionCategorizationConfig == false) { - return false; - } - - PerPartitionCategorizationConfig that = (PerPartitionCategorizationConfig) other; - return this.enabled == that.enabled && this.stopOnWarn == that.stopOnWarn; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, stopOnWarn); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java deleted file mode 100644 index 21d9c99134660..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -public enum RuleAction { - SKIP_RESULT, - SKIP_MODEL_UPDATE; - - /** - * Case-insensitive from string method. 
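As with the Operator enum above, parsing is case-insensitive and toString yields the lower-case wire form. A sketch with invented values:

    RuleAction action = RuleAction.fromString("skip_result"); // -> RuleAction.SKIP_RESULT
    String wire = action.toString();                          // -> "skip_result"

    Operator op = Operator.fromString(" GTE ");               // trimmed and upper-cased -> Operator.GTE
    boolean fires = op.test(10.0, 9.5);                       // Double.compare(10.0, 9.5) >= 0 -> true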
- * - * @param value String representation - * @return The rule action - */ - public static RuleAction fromString(String value) { - return RuleAction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java deleted file mode 100644 index d7f9054c23485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class RuleCondition implements ToXContentObject { - - public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition"); - - public static final ParseField APPLIES_TO_FIELD = new ParseField("applies_to"); - public static final ParseField VALUE_FIELD = new ParseField("value"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RULE_CONDITION_FIELD.getPreferredName(), - true, - a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), AppliesTo::fromString, APPLIES_TO_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Operator::fromString, Operator.OPERATOR_FIELD); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD); - } - - private final AppliesTo appliesTo; - private final Operator operator; - private final double value; - - public RuleCondition(AppliesTo appliesTo, Operator operator, double value) { - this.appliesTo = appliesTo; - this.operator = operator; - this.value = value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(APPLIES_TO_FIELD.getPreferredName(), appliesTo); - builder.field(Operator.OPERATOR_FIELD.getPreferredName(), operator); - builder.field(VALUE_FIELD.getPreferredName(), value); - builder.endObject(); - return builder; - } - - public AppliesTo getAppliesTo() { - return appliesTo; - } - - public Operator getOperator() { - return operator; - } - - public double getValue() { - return value; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleCondition == false) { - return false; - } - - RuleCondition other = (RuleCondition) obj; - return appliesTo == other.appliesTo && operator == other.operator && value == other.value; - } - - @Override - public int hashCode() { - return Objects.hash(appliesTo, operator, value); - } - - public static RuleCondition createTime(Operator operator, long epochSeconds) { - return new 
RuleCondition(AppliesTo.TIME, operator, epochSeconds); - } - - public enum AppliesTo { - ACTUAL, - TYPICAL, - DIFF_FROM_TYPICAL, - TIME; - - public static AppliesTo fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java deleted file mode 100644 index 352d240943a9c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -public class RuleScope implements ToXContentObject { - - public static ContextParser parser() { - return (p, c) -> { - Map unparsedScope = p.map(); - if (unparsedScope.isEmpty()) { - return new RuleScope(); - } - Map scope = new HashMap<>(); - for (Map.Entry entry : unparsedScope.entrySet()) { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - @SuppressWarnings("unchecked") - Map value = (Map) entry.getValue(); - builder.map(value); - try ( - XContentParser scopeParser = XContentFactory.xContent(builder.contentType()) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, Strings.toString(builder)) - ) { - scope.put(entry.getKey(), FilterRef.PARSER.parse(scopeParser, null)); - } - } - } - return new RuleScope(scope); - }; - } - - private final Map scope; - - public RuleScope() { - scope = Collections.emptyMap(); - } - - public RuleScope(Map scope) { - this.scope = Collections.unmodifiableMap(scope); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.map(scope); - } - - public boolean isEmpty() { - return scope.isEmpty(); - } - - public Set getReferencedFilters() { - return scope.values().stream().map(FilterRef::getFilterId).collect(Collectors.toSet()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleScope == false) { - return false; - } - - RuleScope other = (RuleScope) obj; - return Objects.equals(scope, other.scope); - } - - @Override - public int hashCode() { - return Objects.hash(scope); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private Map scope = new HashMap<>(); - - public Builder() {} - - public 
Builder(RuleScope otherScope) { - scope = new HashMap<>(otherScope.scope); - } - - public Builder exclude(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.EXCLUDE)); - return this; - } - - public Builder include(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.INCLUDE)); - return this; - } - - public RuleScope build() { - return new RuleScope(scope); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java deleted file mode 100644 index 20cc5db284302..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Date; -import java.util.Objects; - -/** - * Job processed record counts. - *
<p>
- * The getInput... methods return the actual number of - * fields/records sent to the API including invalid records. - * The getProcessed... methods are the number sent to the - * Engine. - *
<p>
- * The inputRecordCount field is calculated so it - * should not be set in deserialization but it should be serialised - * so the field is visible. - */ -public class DataCounts implements ToXContentObject { - - public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count"); - public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count"); - public static final ParseField INPUT_BYTES = new ParseField("input_bytes"); - public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count"); - public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count"); - public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count"); - public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count"); - public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count"); - public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count"); - public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp"); - public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); - public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp"); - public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp"); - public static final ParseField LOG_TIME = new ParseField("log_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_counts", - true, - a -> new DataCounts( - (String) a[0], - (long) a[1], - (long) a[2], - (long) a[3], - (long) a[4], - (long) a[5], - (long) a[6], - (long) a[7], - (long) a[8], - (long) a[9], - (long) a[10], - (Date) a[11], - (Date) a[12], - (Date) a[13], - (Date) a[14], - (Date) a[15], - (Instant) a[16] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), - EARLIEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); - 
PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), - LAST_DATA_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), - LATEST_EMPTY_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), - LATEST_SPARSE_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, LOG_TIME.getPreferredName()), - LOG_TIME, - ValueType.VALUE - ); - } - - private final String jobId; - private long processedRecordCount; - private long processedFieldCount; - private long inputBytes; - private long inputFieldCount; - private long invalidDateCount; - private long missingFieldCount; - private long outOfOrderTimeStampCount; - private long emptyBucketCount; - private long sparseBucketCount; - private long bucketCount; - private Date earliestRecordTimeStamp; - private Date latestRecordTimeStamp; - private Date lastDataTimeStamp; - private Date latestEmptyBucketTimeStamp; - private Date latestSparseBucketTimeStamp; - private Instant logTime; - - public DataCounts( - String jobId, - long processedRecordCount, - long processedFieldCount, - long inputBytes, - long inputFieldCount, - long invalidDateCount, - long missingFieldCount, - long outOfOrderTimeStampCount, - long emptyBucketCount, - long sparseBucketCount, - long bucketCount, - Date earliestRecordTimeStamp, - Date latestRecordTimeStamp, - Date lastDataTimeStamp, - Date latestEmptyBucketTimeStamp, - Date latestSparseBucketTimeStamp, - Instant logTime - ) { - this.jobId = jobId; - this.processedRecordCount = processedRecordCount; - this.processedFieldCount = processedFieldCount; - this.inputBytes = inputBytes; - this.inputFieldCount = inputFieldCount; - this.invalidDateCount = invalidDateCount; - this.missingFieldCount = missingFieldCount; - this.outOfOrderTimeStampCount = outOfOrderTimeStampCount; - this.emptyBucketCount = emptyBucketCount; - this.sparseBucketCount = sparseBucketCount; - this.bucketCount = bucketCount; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.earliestRecordTimeStamp = earliestRecordTimeStamp; - this.lastDataTimeStamp = lastDataTimeStamp; - this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp; - this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp; - this.logTime = logTime == null ? null : Instant.ofEpochMilli(logTime.toEpochMilli()); - } - - DataCounts(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - /** - * Number of records processed by this job. - * This value is the number of records sent passed on to - * the engine i.e. {@linkplain #getInputRecordCount()} minus - * records with bad dates or out of order - * - * @return Number of records processed by this job {@code long} - */ - public long getProcessedRecordCount() { - return processedRecordCount; - } - - /** - * Number of data points (processed record count * the number - * of analysed fields) processed by this job. This count does - * not include the time field. 
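Read literally, that definition is a simple product; with invented numbers, and assuming no fields were missing:

    long processedRecords = 1_000L;   // invented: getProcessedRecordCount()
    long analysedFields = 3L;         // invented: fields the detectors analyse, time field excluded
    long processedFieldCount = processedRecords * analysedFields; // 3,000 data points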
- * - * @return Number of data points processed by this job {@code long} - */ - public long getProcessedFieldCount() { - return processedFieldCount; - } - - /** - * Total number of input records read. - * This = processed record count + date parse error record count - * + out of order record count. - *
<p>
- * Records with missing fields are counted as they are still written. - * - * @return Total number of input records read {@code long} - */ - public long getInputRecordCount() { - return processedRecordCount + outOfOrderTimeStampCount + invalidDateCount; - } - - /** - * The total number of bytes sent to this job. - * This value includes the bytes from any records - * that have been discarded for any reason - * e.g. because the date cannot be read - * - * @return Volume in bytes - */ - public long getInputBytes() { - return inputBytes; - } - - /** - * The total number of fields sent to the job - * including fields that aren't analysed. - * - * @return The total number of fields sent to the job - */ - public long getInputFieldCount() { - return inputFieldCount; - } - - /** - * The number of records with an invalid date field that could - * not be parsed or converted to epoch time. - * - * @return The number of records with an invalid date field - */ - public long getInvalidDateCount() { - return invalidDateCount; - } - - /** - * The number of missing fields that had been - * configured for analysis. - * - * @return The number of missing fields - */ - public long getMissingFieldCount() { - return missingFieldCount; - } - - /** - * The number of records with a timestamp that is - * before the time of the latest record. Records should - * be in ascending chronological order. - * - * @return The number of records with a timestamp that is before the time of the latest record - */ - public long getOutOfOrderTimeStampCount() { - return outOfOrderTimeStampCount; - } - - /** - * The number of buckets with no records in them. Used to measure general data fitness and/or - * configuration problems (bucket span). - * - * @return Number of empty buckets processed by this job {@code long} - */ - public long getEmptyBucketCount() { - return emptyBucketCount; - } - - /** - * The number of buckets with few records compared to the overall counts. - * Used to measure general data fitness and/or configuration problems (bucket span). - * - * @return Number of sparse buckets processed by this job {@code long} - */ - public long getSparseBucketCount() { - return sparseBucketCount; - } - - /** - * The number of buckets overall. - * - * @return Number of buckets processed by this job {@code long} - */ - public long getBucketCount() { - return bucketCount; - } - - /** - * The time of the first record seen. - * - * @return The first record time - */ - public Date getEarliestRecordTimeStamp() { - return earliestRecordTimeStamp; - } - - /** - * The time of the latest record seen. - * - * @return Latest record time - */ - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - /** - * The wall clock time the latest record was seen. - * - * @return Wall clock time of the latest record - */ - public Date getLastDataTimeStamp() { - return lastDataTimeStamp; - } - - /** - * The time of the latest empty bucket seen. - * - * @return Latest empty bucket time - */ - public Date getLatestEmptyBucketTimeStamp() { - return latestEmptyBucketTimeStamp; - } - - /** - * The time of the latest sparse bucket seen. - * - * @return Latest sparse bucket time - */ - public Date getLatestSparseBucketTimeStamp() { - return latestSparseBucketTimeStamp; - } - - /** - * The wall clock time at the point when this instance was created.
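The identity behind getInputRecordCount() can be checked with invented numbers:

    long processed = 980L;    // records passed on to the engine
    long outOfOrder = 15L;    // timestamps earlier than the latest record seen
    long invalidDate = 5L;    // dates that could not be parsed
    long input = processed + outOfOrder + invalidDate;   // getInputRecordCount() == 1,000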
- * - * @return The wall clock time - */ - public Instant getLogTime() { - return logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount); - builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount); - builder.field(INPUT_BYTES.getPreferredName(), inputBytes); - builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount); - builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount); - builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount); - builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount); - builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount); - builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - if (earliestRecordTimeStamp != null) { - builder.timeField( - EARLIEST_RECORD_TIME.getPreferredName(), - EARLIEST_RECORD_TIME.getPreferredName() + "_string", - earliestRecordTimeStamp.getTime() - ); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (lastDataTimeStamp != null) { - builder.timeField( - LAST_DATA_TIME.getPreferredName(), - LAST_DATA_TIME.getPreferredName() + "_string", - lastDataTimeStamp.getTime() - ); - } - if (latestEmptyBucketTimeStamp != null) { - builder.timeField( - LATEST_EMPTY_BUCKET_TIME.getPreferredName(), - LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", - latestEmptyBucketTimeStamp.getTime() - ); - } - if (latestSparseBucketTimeStamp != null) { - builder.timeField( - LATEST_SPARSE_BUCKET_TIME.getPreferredName(), - LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string", - latestSparseBucketTimeStamp.getTime() - ); - } - builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); - if (logTime != null) { - builder.timeField(LOG_TIME.getPreferredName(), LOG_TIME.getPreferredName() + "_string", logTime.toEpochMilli()); - } - - builder.endObject(); - return builder; - } - - /** - * Equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DataCounts that = (DataCounts) other; - - return Objects.equals(this.jobId, that.jobId) - && this.processedRecordCount == that.processedRecordCount - && this.processedFieldCount == that.processedFieldCount - && this.inputBytes == that.inputBytes - && this.inputFieldCount == that.inputFieldCount - && this.invalidDateCount == that.invalidDateCount - && this.missingFieldCount == that.missingFieldCount - && this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount - && this.emptyBucketCount == that.emptyBucketCount - && this.sparseBucketCount == that.sparseBucketCount - && this.bucketCount == that.bucketCount - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) - && Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) - && Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) - && Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp) - && 
Objects.equals(this.logTime, that.logTime); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - processedRecordCount, - processedFieldCount, - inputBytes, - inputFieldCount, - invalidDateCount, - missingFieldCount, - outOfOrderTimeStampCount, - lastDataTimeStamp, - emptyBucketCount, - sparseBucketCount, - bucketCount, - latestRecordTimeStamp, - earliestRecordTimeStamp, - latestEmptyBucketTimeStamp, - latestSparseBucketTimeStamp, - logTime - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java deleted file mode 100644 index db3a3fa011738..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Locale; -import java.util.Objects; - -/** - * Provide access to the C++ model size stats for the Java process. 
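A sketch of assembling an instance through the Builder defined below; the job id and byte counts are invented, and the builder defaults memoryStatus and categorizationStatus to OK and logTime to the current wall clock:

    ModelSizeStats stats = new ModelSizeStats.Builder("my-job")   // invented job id
        .setModelBytes(2_000_000L)
        .setPeakModelBytes(3_000_000L)
        .setMemoryStatus(ModelSizeStats.MemoryStatus.SOFT_LIMIT)  // models pruned aggressively to stay under the limit
        .build();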
- */ -public class ModelSizeStats implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "model_size_stats"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field Names - */ - public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); - public static final ParseField PEAK_MODEL_BYTES_FIELD = new ParseField("peak_model_bytes"); - public static final ParseField MODEL_BYTES_EXCEEDED_FIELD = new ParseField("model_bytes_exceeded"); - public static final ParseField MODEL_BYTES_MEMORY_LIMIT_FIELD = new ParseField("model_bytes_memory_limit"); - public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); - public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); - public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); - public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count"); - public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status"); - public static final ParseField ASSIGNMENT_MEMORY_BASIS_FIELD = new ParseField("assignment_memory_basis"); - public static final ParseField CATEGORIZED_DOC_COUNT_FIELD = new ParseField("categorized_doc_count"); - public static final ParseField TOTAL_CATEGORY_COUNT_FIELD = new ParseField("total_category_count"); - public static final ParseField FREQUENT_CATEGORY_COUNT_FIELD = new ParseField("frequent_category_count"); - public static final ParseField RARE_CATEGORY_COUNT_FIELD = new ParseField("rare_category_count"); - public static final ParseField DEAD_CATEGORY_COUNT_FIELD = new ParseField("dead_category_count"); - public static final ParseField FAILED_CATEGORY_COUNT_FIELD = new ParseField("failed_category_count"); - public static final ParseField CATEGORIZATION_STATUS_FIELD = new ParseField("categorization_status"); - public static final ParseField LOG_TIME_FIELD = new ParseField("log_time"); - public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setPeakModelBytes, PEAK_MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setModelBytesExceeded, MODEL_BYTES_EXCEEDED_FIELD); - PARSER.declareLong(Builder::setModelBytesMemoryLimit, MODEL_BYTES_MEMORY_LIMIT_FIELD); - PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); - PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); - PARSER.declareField( - Builder::setAssignmentMemoryBasis, - p -> AssignmentMemoryBasis.fromString(p.text()), - ASSIGNMENT_MEMORY_BASIS_FIELD, - ValueType.STRING - ); - PARSER.declareLong(Builder::setCategorizedDocCount, CATEGORIZED_DOC_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalCategoryCount, 
TOTAL_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFrequentCategoryCount, FREQUENT_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setRareCategoryCount, RARE_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setDeadCategoryCount, DEAD_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFailedCategoryCount, FAILED_CATEGORY_COUNT_FIELD); - PARSER.declareField( - Builder::setCategorizationStatus, - p -> CategorizationStatus.fromString(p.text()), - CATEGORIZATION_STATUS_FIELD, - ValueType.STRING - ); - PARSER.declareField( - Builder::setLogTime, - (p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), - LOG_TIME_FIELD, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), - TIMESTAMP_FIELD, - ValueType.VALUE - ); - } - - /** - * The status of the memory monitored by the ResourceMonitor. OK is default, - * SOFT_LIMIT means that the models have done some aggressive pruning to - * keep the memory below the limit, and HARD_LIMIT means that samples have - * been dropped - */ - public enum MemoryStatus { - OK, - SOFT_LIMIT, - HARD_LIMIT; - - public static MemoryStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * Where will we get the memory requirement from when assigning this job to - * a node? There are three possibilities: - * 1. The job's model_memory_limit - * 2. The current model memory, i.e. what's reported in model_bytes of this object - * 3. The peak model memory, i.e. what's reported in peak_model_bytes of this object - * The field storing this enum can also be null, which means the - * assignment code will decide on the fly - this was the old behaviour prior - * to 7.11. - */ - public enum AssignmentMemoryBasis { - MODEL_MEMORY_LIMIT, - CURRENT_MODEL_BYTES, - PEAK_MODEL_BYTES; - - public static AssignmentMemoryBasis fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * The status of categorization for a job. 
OK is default, WARN - * means that inappropriate numbers of categories are being found - */ - public enum CategorizationStatus { - OK, - WARN; - - public static CategorizationStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private final String jobId; - private final long modelBytes; - private final Long peakModelBytes; - private final Long modelBytesExceeded; - private final Long modelBytesMemoryLimit; - private final long totalByFieldCount; - private final long totalOverFieldCount; - private final long totalPartitionFieldCount; - private final long bucketAllocationFailuresCount; - private final MemoryStatus memoryStatus; - private final AssignmentMemoryBasis assignmentMemoryBasis; - private final long categorizedDocCount; - private final long totalCategoryCount; - private final long frequentCategoryCount; - private final long rareCategoryCount; - private final long deadCategoryCount; - private final long failedCategoryCount; - private final CategorizationStatus categorizationStatus; - private final Date timestamp; - private final Date logTime; - - private ModelSizeStats( - String jobId, - long modelBytes, - Long peakModelBytes, - Long modelBytesExceeded, - Long modelBytesMemoryLimit, - long totalByFieldCount, - long totalOverFieldCount, - long totalPartitionFieldCount, - long bucketAllocationFailuresCount, - MemoryStatus memoryStatus, - AssignmentMemoryBasis assignmentMemoryBasis, - long categorizedDocCount, - long totalCategoryCount, - long frequentCategoryCount, - long rareCategoryCount, - long deadCategoryCount, - long failedCategoryCount, - CategorizationStatus categorizationStatus, - Date timestamp, - Date logTime - ) { - this.jobId = jobId; - this.modelBytes = modelBytes; - this.peakModelBytes = peakModelBytes; - this.modelBytesExceeded = modelBytesExceeded; - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - this.totalByFieldCount = totalByFieldCount; - this.totalOverFieldCount = totalOverFieldCount; - this.totalPartitionFieldCount = totalPartitionFieldCount; - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - this.memoryStatus = memoryStatus; - this.assignmentMemoryBasis = assignmentMemoryBasis; - this.categorizedDocCount = categorizedDocCount; - this.totalCategoryCount = totalCategoryCount; - this.frequentCategoryCount = frequentCategoryCount; - this.rareCategoryCount = rareCategoryCount; - this.deadCategoryCount = deadCategoryCount; - this.failedCategoryCount = failedCategoryCount; - this.categorizationStatus = categorizationStatus; - this.timestamp = timestamp; - this.logTime = logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); - if (peakModelBytes != null) { - builder.field(PEAK_MODEL_BYTES_FIELD.getPreferredName(), peakModelBytes); - } - if (modelBytesExceeded != null) { - builder.field(MODEL_BYTES_EXCEEDED_FIELD.getPreferredName(), modelBytesExceeded); - } - if (modelBytesMemoryLimit != null) { - builder.field(MODEL_BYTES_MEMORY_LIMIT_FIELD.getPreferredName(), modelBytesMemoryLimit); - } - builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); - 
builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); - builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); - builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount); - builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus); - if (assignmentMemoryBasis != null) { - builder.field(ASSIGNMENT_MEMORY_BASIS_FIELD.getPreferredName(), assignmentMemoryBasis); - } - builder.field(CATEGORIZED_DOC_COUNT_FIELD.getPreferredName(), categorizedDocCount); - builder.field(TOTAL_CATEGORY_COUNT_FIELD.getPreferredName(), totalCategoryCount); - builder.field(FREQUENT_CATEGORY_COUNT_FIELD.getPreferredName(), frequentCategoryCount); - builder.field(RARE_CATEGORY_COUNT_FIELD.getPreferredName(), rareCategoryCount); - builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount); - builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount); - builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus); - builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); - if (timestamp != null) { - builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); - } - - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public long getModelBytes() { - return modelBytes; - } - - public Long getPeakModelBytes() { - return peakModelBytes; - } - - public Long getModelBytesExceeded() { - return modelBytesExceeded; - } - - public Long getModelBytesMemoryLimit() { - return modelBytesMemoryLimit; - } - - public long getTotalByFieldCount() { - return totalByFieldCount; - } - - public long getTotalPartitionFieldCount() { - return totalPartitionFieldCount; - } - - public long getTotalOverFieldCount() { - return totalOverFieldCount; - } - - public long getBucketAllocationFailuresCount() { - return bucketAllocationFailuresCount; - } - - public MemoryStatus getMemoryStatus() { - return memoryStatus; - } - - @Nullable - public AssignmentMemoryBasis getAssignmentMemoryBasis() { - return assignmentMemoryBasis; - } - - public long getCategorizedDocCount() { - return categorizedDocCount; - } - - public long getTotalCategoryCount() { - return totalCategoryCount; - } - - public long getFrequentCategoryCount() { - return frequentCategoryCount; - } - - public long getRareCategoryCount() { - return rareCategoryCount; - } - - public long getDeadCategoryCount() { - return deadCategoryCount; - } - - public long getFailedCategoryCount() { - return failedCategoryCount; - } - - public CategorizationStatus getCategorizationStatus() { - return categorizationStatus; - } - - /** - * The timestamp of the last processed record when this instance was created. - * - * @return The record time - */ - public Date getTimestamp() { - return timestamp; - } - - /** - * The wall clock time at the point when this instance was created. 
- * - * @return The wall clock time - */ - public Date getLogTime() { - return logTime; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - this.bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSizeStats that = (ModelSizeStats) other; - - return this.modelBytes == that.modelBytes - && Objects.equals(this.peakModelBytes, that.peakModelBytes) - && Objects.equals(this.modelBytesExceeded, that.modelBytesExceeded) - && Objects.equals(this.modelBytesMemoryLimit, that.modelBytesMemoryLimit) - && this.totalByFieldCount == that.totalByFieldCount - && this.totalOverFieldCount == that.totalOverFieldCount - && this.totalPartitionFieldCount == that.totalPartitionFieldCount - && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount - && Objects.equals(this.memoryStatus, that.memoryStatus) - && Objects.equals(this.assignmentMemoryBasis, that.assignmentMemoryBasis) - && this.categorizedDocCount == that.categorizedDocCount - && this.totalCategoryCount == that.totalCategoryCount - && this.frequentCategoryCount == that.frequentCategoryCount - && this.rareCategoryCount == that.rareCategoryCount - && this.deadCategoryCount == that.deadCategoryCount - && this.failedCategoryCount == that.failedCategoryCount - && Objects.equals(this.categorizationStatus, that.categorizationStatus) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.logTime, that.logTime) - && Objects.equals(this.jobId, that.jobId); - } - - public static class Builder { - - private final String jobId; - private long modelBytes; - private Long peakModelBytes; - private Long modelBytesExceeded; - private Long modelBytesMemoryLimit; - private long totalByFieldCount; - private long totalOverFieldCount; - private long totalPartitionFieldCount; - private long bucketAllocationFailuresCount; - private MemoryStatus memoryStatus; - private AssignmentMemoryBasis assignmentMemoryBasis; - private long categorizedDocCount; - private long totalCategoryCount; - private long frequentCategoryCount; - private long rareCategoryCount; - private long deadCategoryCount; - private long failedCategoryCount; - private CategorizationStatus categorizationStatus; - private Date timestamp; - private Date logTime; - - public Builder(String jobId) { - this.jobId = jobId; - memoryStatus = MemoryStatus.OK; - categorizationStatus = CategorizationStatus.OK; - logTime = new Date(); - } - - public Builder(ModelSizeStats modelSizeStats) { - this.jobId = modelSizeStats.jobId; - this.modelBytes = modelSizeStats.modelBytes; - this.peakModelBytes = modelSizeStats.peakModelBytes; - this.modelBytesExceeded = modelSizeStats.modelBytesExceeded; - this.modelBytesMemoryLimit = modelSizeStats.modelBytesMemoryLimit; - this.totalByFieldCount = modelSizeStats.totalByFieldCount; - this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; - this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; - this.bucketAllocationFailuresCount = 
modelSizeStats.bucketAllocationFailuresCount; - this.memoryStatus = modelSizeStats.memoryStatus; - this.assignmentMemoryBasis = modelSizeStats.assignmentMemoryBasis; - this.categorizedDocCount = modelSizeStats.categorizedDocCount; - this.totalCategoryCount = modelSizeStats.totalCategoryCount; - this.frequentCategoryCount = modelSizeStats.frequentCategoryCount; - this.rareCategoryCount = modelSizeStats.rareCategoryCount; - this.deadCategoryCount = modelSizeStats.deadCategoryCount; - this.failedCategoryCount = modelSizeStats.failedCategoryCount; - this.categorizationStatus = modelSizeStats.categorizationStatus; - this.timestamp = modelSizeStats.timestamp; - this.logTime = modelSizeStats.logTime; - } - - public Builder setModelBytes(long modelBytes) { - this.modelBytes = modelBytes; - return this; - } - - public Builder setPeakModelBytes(long peakModelBytes) { - this.peakModelBytes = peakModelBytes; - return this; - } - - public Builder setModelBytesExceeded(long modelBytesExceeded) { - this.modelBytesExceeded = modelBytesExceeded; - return this; - } - - public Builder setModelBytesMemoryLimit(long modelBytesMemoryLimit) { - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - return this; - } - - public Builder setTotalByFieldCount(long totalByFieldCount) { - this.totalByFieldCount = totalByFieldCount; - return this; - } - - public Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) { - this.totalPartitionFieldCount = totalPartitionFieldCount; - return this; - } - - public Builder setTotalOverFieldCount(long totalOverFieldCount) { - this.totalOverFieldCount = totalOverFieldCount; - return this; - } - - public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) { - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - return this; - } - - public Builder setMemoryStatus(MemoryStatus memoryStatus) { - Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.memoryStatus = memoryStatus; - return this; - } - - public Builder setAssignmentMemoryBasis(AssignmentMemoryBasis assignmentMemoryBasis) { - this.assignmentMemoryBasis = assignmentMemoryBasis; - return this; - } - - public Builder setCategorizedDocCount(long categorizedDocCount) { - this.categorizedDocCount = categorizedDocCount; - return this; - } - - public Builder setTotalCategoryCount(long totalCategoryCount) { - this.totalCategoryCount = totalCategoryCount; - return this; - } - - public Builder setFrequentCategoryCount(long frequentCategoryCount) { - this.frequentCategoryCount = frequentCategoryCount; - return this; - } - - public Builder setRareCategoryCount(long rareCategoryCount) { - this.rareCategoryCount = rareCategoryCount; - return this; - } - - public Builder setDeadCategoryCount(long deadCategoryCount) { - this.deadCategoryCount = deadCategoryCount; - return this; - } - - public Builder setFailedCategoryCount(long failedCategoryCount) { - this.failedCategoryCount = failedCategoryCount; - return this; - } - - public Builder setCategorizationStatus(CategorizationStatus categorizationStatus) { - Objects.requireNonNull(categorizationStatus, "[" + CATEGORIZATION_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.categorizationStatus = categorizationStatus; - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setLogTime(Date logTime) { - this.logTime = logTime; - return this; - } - - public ModelSizeStats build() { - return 
new ModelSizeStats( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java deleted file mode 100644 index e21b8f512a143..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * ModelSnapshot Result POJO - */ -public class ModelSnapshot implements ToXContentObject { - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); - public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); - public static final ParseField QUANTILES = new ParseField("quantiles"); - public static final ParseField RETAIN = new ParseField("retain"); - public static final ParseField MIN_VERSION = new ParseField("min_version"); - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - - public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("model_snapshot", true, Builder::new); - - static { - PARSER.declareString(Builder::setJobId, Job.ID); - PARSER.declareString(Builder::setMinVersion, MIN_VERSION); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); - PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); - PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, ModelSizeStats.RESULT_TYPE_FIELD); - PARSER.declareField( - Builder::setLatestRecordTimeStamp, - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setLatestResultTimeStamp, - (p) ->
TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), - LATEST_RESULT_TIME, - ValueType.VALUE - ); - PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES); - PARSER.declareBoolean(Builder::setRetain, RETAIN); - } - - private final String jobId; - - /** - * The minimum version a node should have to be able - * to read this model snapshot. - */ - private final Version minVersion; - - private final Date timestamp; - private final String description; - private final String snapshotId; - private final int snapshotDocCount; - private final ModelSizeStats modelSizeStats; - private final Date latestRecordTimeStamp; - private final Date latestResultTimeStamp; - private final Quantiles quantiles; - private final boolean retain; - - private ModelSnapshot( - String jobId, - Version minVersion, - Date timestamp, - String description, - String snapshotId, - int snapshotDocCount, - ModelSizeStats modelSizeStats, - Date latestRecordTimeStamp, - Date latestResultTimeStamp, - Quantiles quantiles, - boolean retain - ) { - this.jobId = jobId; - this.minVersion = minVersion; - this.timestamp = timestamp; - this.description = description; - this.snapshotId = snapshotId; - this.snapshotDocCount = snapshotDocCount; - this.modelSizeStats = modelSizeStats; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.latestResultTimeStamp = latestResultTimeStamp; - this.quantiles = quantiles; - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MIN_VERSION.getPreferredName(), minVersion); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); - if (modelSizeStats != null) { - builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (latestResultTimeStamp != null) { - builder.timeField( - LATEST_RESULT_TIME.getPreferredName(), - LATEST_RESULT_TIME.getPreferredName() + "_string", - latestResultTimeStamp.getTime() - ); - } - if (quantiles != null) { - builder.field(QUANTILES.getPreferredName(), quantiles); - } - builder.field(RETAIN.getPreferredName(), retain); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Version getMinVersion() { - return minVersion; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getDescription() { - return description; - } - - public String getSnapshotId() { - return snapshotId; - } - - public int getSnapshotDocCount() { - return snapshotDocCount; - } - - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public Quantiles getQuantiles() { - return quantiles; - } - - public boolean getRetain() { - return retain; - } - - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - public Date getLatestResultTimeStamp() { - return latestResultTimeStamp; - } - - @Override - public int 
hashCode() { - return Objects.hash( - jobId, - minVersion, - timestamp, - description, - snapshotId, - quantiles, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - retain - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSnapshot that = (ModelSnapshot) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.minVersion, that.minVersion) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.description, that.description) - && Objects.equals(this.snapshotId, that.snapshotId) - && this.snapshotDocCount == that.snapshotDocCount - && Objects.equals(this.modelSizeStats, that.modelSizeStats) - && Objects.equals(this.quantiles, that.quantiles) - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp) - && this.retain == that.retain; - } - - public static class Builder { - private String jobId; - - // Stored snapshot documents created prior to 6.3.0 will have no value for min_version. - private Version minVersion = Version.fromString("6.3.0"); - - private Date timestamp; - private String description; - private String snapshotId; - private int snapshotDocCount; - private ModelSizeStats modelSizeStats; - private Date latestRecordTimeStamp; - private Date latestResultTimeStamp; - private Quantiles quantiles; - private boolean retain; - - public Builder() {} - - public Builder(String jobId) { - this.jobId = jobId; - } - - public Builder(ModelSnapshot modelSnapshot) { - this.jobId = modelSnapshot.jobId; - this.timestamp = modelSnapshot.timestamp; - this.description = modelSnapshot.description; - this.snapshotId = modelSnapshot.snapshotId; - this.snapshotDocCount = modelSnapshot.snapshotDocCount; - this.modelSizeStats = modelSnapshot.modelSizeStats; - this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp; - this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp; - this.quantiles = modelSnapshot.quantiles; - this.retain = modelSnapshot.retain; - this.minVersion = modelSnapshot.minVersion; - } - - public Builder setJobId(String jobId) { - this.jobId = jobId; - return this; - } - - Builder setMinVersion(Version minVersion) { - this.minVersion = minVersion; - return this; - } - - Builder setMinVersion(String minVersion) { - this.minVersion = Version.fromString(minVersion); - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - return this; - } - - public Builder setSnapshotDocCount(int snapshotDocCount) { - this.snapshotDocCount = snapshotDocCount; - return this; - } - - public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) { - this.modelSizeStats = modelSizeStats.build(); - return this; - } - - public Builder setModelSizeStats(ModelSizeStats modelSizeStats) { - this.modelSizeStats = modelSizeStats; - return this; - } - - public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) { - this.latestRecordTimeStamp = latestRecordTimeStamp; - return this; - } - - public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) { - 
this.latestResultTimeStamp = latestResultTimeStamp; - return this; - } - - public Builder setQuantiles(Quantiles quantiles) { - this.quantiles = quantiles; - return this; - } - - public Builder setRetain(boolean value) { - this.retain = value; - return this; - } - - public ModelSnapshot build() { - return new ModelSnapshot( - jobId, - minVersion, - timestamp, - description, - snapshotId, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - quantiles, - retain - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java deleted file mode 100644 index 968447bcfa4dd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Quantiles Result POJO - */ -public class Quantiles implements ToXContentObject { - - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); - - public static final ConstructingObjectParser<Quantiles, Void> PARSER = new ConstructingObjectParser<>( - "quantiles", - true, - a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG); - PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE); - } - - private final String jobId; - private final Date timestamp; - private final String quantileState; - - public Quantiles(String jobId, Date timestamp, String quantileState) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.quantileState = Objects.requireNonNull(quantileState); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime()); - } - if (quantileState != null) { - builder.field(QUANTILE_STATE.getPreferredName(), quantileState); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getQuantileState() { - return quantileState; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, quantileState); - } - - /** - * Compare all the fields.
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Quantiles that = (Quantiles) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.quantileState, that.quantileState); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java deleted file mode 100644 index 60ed9252affde..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Stats that give more insight into timing of various operations performed as part of anomaly detection job. 
- */ -public class TimingStats implements ToXContentObject { - - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_BUCKET_PROCESSING_TIME_MS = new ParseField("total_bucket_processing_time_ms"); - public static final ParseField MIN_BUCKET_PROCESSING_TIME_MS = new ParseField("minimum_bucket_processing_time_ms"); - public static final ParseField MAX_BUCKET_PROCESSING_TIME_MS = new ParseField("maximum_bucket_processing_time_ms"); - public static final ParseField AVG_BUCKET_PROCESSING_TIME_MS = new ParseField("average_bucket_processing_time_ms"); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS = new ParseField( - "exponential_average_bucket_processing_time_ms" - ); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS = new ParseField( - "exponential_average_bucket_processing_time_per_hour_ms" - ); - - public static final ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>("timing_stats", true, args -> { - String jobId = (String) args[0]; - Long bucketCount = (Long) args[1]; - Double totalBucketProcessingTimeMs = (Double) args[2]; - Double minBucketProcessingTimeMs = (Double) args[3]; - Double maxBucketProcessingTimeMs = (Double) args[4]; - Double avgBucketProcessingTimeMs = (Double) args[5]; - Double exponentialAvgBucketProcessingTimeMs = (Double) args[6]; - Double exponentialAvgBucketProcessingTimePerHourMs = (Double) args[7]; - return new TimingStats( - jobId, - getOrDefault(bucketCount, 0L), - getOrDefault(totalBucketProcessingTimeMs, 0.0), - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - }); - - static { - PARSER.declareString(constructorArg(), Job.ID); - PARSER.declareLong(optionalConstructorArg(), BUCKET_COUNT); - PARSER.declareDouble(optionalConstructorArg(), TOTAL_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MIN_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MAX_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS); - } - - private final String jobId; - private long bucketCount; - private double totalBucketProcessingTimeMs; - private Double minBucketProcessingTimeMs; - private Double maxBucketProcessingTimeMs; - private Double avgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimePerHourMs; - - public TimingStats( - String jobId, - long bucketCount, - double totalBucketProcessingTimeMs, - @Nullable Double minBucketProcessingTimeMs, - @Nullable Double maxBucketProcessingTimeMs, - @Nullable Double avgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimePerHourMs - ) { - this.jobId = jobId; - this.bucketCount = bucketCount; - this.totalBucketProcessingTimeMs = totalBucketProcessingTimeMs; - this.minBucketProcessingTimeMs = minBucketProcessingTimeMs; - this.maxBucketProcessingTimeMs = maxBucketProcessingTimeMs; - this.avgBucketProcessingTimeMs = avgBucketProcessingTimeMs; - this.exponentialAvgBucketProcessingTimeMs = exponentialAvgBucketProcessingTimeMs; -
this.exponentialAvgBucketProcessingTimePerHourMs = exponentialAvgBucketProcessingTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalBucketProcessingTimeMs() { - return totalBucketProcessingTimeMs; - } - - public Double getMinBucketProcessingTimeMs() { - return minBucketProcessingTimeMs; - } - - public Double getMaxBucketProcessingTimeMs() { - return maxBucketProcessingTimeMs; - } - - public Double getAvgBucketProcessingTimeMs() { - return avgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimeMs() { - return exponentialAvgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimePerHourMs() { - return exponentialAvgBucketProcessingTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_BUCKET_PROCESSING_TIME_MS.getPreferredName(), totalBucketProcessingTimeMs); - if (minBucketProcessingTimeMs != null) { - builder.field(MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName(), minBucketProcessingTimeMs); - } - if (maxBucketProcessingTimeMs != null) { - builder.field(MAX_BUCKET_PROCESSING_TIME_MS.getPreferredName(), maxBucketProcessingTimeMs); - } - if (avgBucketProcessingTimeMs != null) { - builder.field(AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), avgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimeMs != null) { - builder.field(EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), exponentialAvgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimePerHourMs != null) { - builder.field( - EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS.getPreferredName(), - exponentialAvgBucketProcessingTimePerHourMs - ); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(this.jobId, that.jobId) - && this.bucketCount == that.bucketCount - && this.totalBucketProcessingTimeMs == that.totalBucketProcessingTimeMs - && Objects.equals(this.minBucketProcessingTimeMs, that.minBucketProcessingTimeMs) - && Objects.equals(this.maxBucketProcessingTimeMs, that.maxBucketProcessingTimeMs) - && Objects.equals(this.avgBucketProcessingTimeMs, that.avgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimeMs, that.exponentialAvgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimePerHourMs, that.exponentialAvgBucketProcessingTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - bucketCount, - totalBucketProcessingTimeMs, - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static <T> T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ?
value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java deleted file mode 100644 index 5be75c52b19a6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Cause POJO. - * Used as a nested level inside population anomaly records. - */ -public class AnomalyCause implements ToXContentObject { - - public static final ParseField ANOMALY_CAUSE = new ParseField("anomaly_cause"); - - /** - * Result fields - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - - /** - * Metric Results - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - public static final ObjectParser<AnomalyCause, Void> PARSER = new ObjectParser<>( - ANOMALY_CAUSE.getPreferredName(), - true, - AnomalyCause::new - ); - - static { - PARSER.declareDouble(AnomalyCause::setProbability, PROBABILITY); - PARSER.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setFunction, FUNCTION); - PARSER.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION); -
PARSER.declareDoubleArray(AnomalyCause::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyCause::setActual, ACTUAL); - PARSER.declareString(AnomalyCause::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyCause::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private double probability; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List<Double> typical; - private List<Double> actual; - private String fieldName; - private String overFieldName; - private String overFieldValue; - - private List<Influence> influencers; - - AnomalyCause() {} - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PROBABILITY.getPreferredName(), probability); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (influencers != null) { - builder.field(INFLUENCERS.getPreferredName(), influencers); - } - builder.endObject(); - return builder; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } -
- void setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List<Double> getTypical() { - return typical; - } - - void setTypical(List<Double> typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List<Double> getActual() { - return actual; - } - - void setActual(List<Double> actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List<Influence> getInfluencers() { - return influencers; - } - - void setInfluencers(List<Influence> influencers) { - this.influencers = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - probability, - actual, - typical, - byFieldName, - byFieldValue, - correlatedByFieldValue, - fieldName, - function, - functionDescription, - overFieldName, - overFieldValue, - partitionFieldName, - partitionFieldValue, - influencers - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyCause that = (AnomalyCause) other; - - return this.probability == that.probability - && Objects.equals(this.typical, that.typical) - && Objects.equals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - && Objects.equals(this.influencers, that.influencers); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java deleted file mode 100644 index f3a93703a0275..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Record POJO. - * Uses the object wrappers Boolean and Double so null values - * can be returned if the members have not been set. - */ -public class AnomalyRecord implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "record"; - - /** - * Result fields (all detector types) - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("records"); - - /** - * Metric Results (including population metrics) - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - /** - * Population results - */ - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField CAUSES = new ParseField("causes"); - - /** - * Normalization - */ - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score"); - - public static final ConstructingObjectParser<AnomalyRecord, Void> PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( -
ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY); - PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT); - PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE); - PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE); - PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX); - PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM); - PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setFunction, FUNCTION); - PARSER.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION); - PARSER.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyRecord::setActual, ACTUAL); - PARSER.declareString(AnomalyRecord::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyRecord::setCauses, AnomalyCause.PARSER, CAUSES); - PARSER.declareObjectArray(AnomalyRecord::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private final String jobId; - private int detectorIndex; - private double probability; - private Double multiBucketImpact; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List<Double> typical; - private List<Double> actual; - private boolean isInterim; - - private String fieldName; - - private String overFieldName; - private String overFieldValue; - private List<AnomalyCause> causes; - - private double recordScore; - - private double initialRecordScore; - - private final Date timestamp; - private final long bucketSpan; - - private List<Influence> influences; - - AnomalyRecord(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(PROBABILITY.getPreferredName(), probability); - if (multiBucketImpact != null) { - builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact); - } - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); -
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (causes != null) { - builder.field(CAUSES.getPreferredName(), causes); - } - if (influences != null) { - builder.field(INFLUENCERS.getPreferredName(), influences); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return this.jobId; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - void setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - } - - public double getRecordScore() { - return recordScore; - } - - void setRecordScore(double recordScore) { - this.recordScore = recordScore; - } - - public double getInitialRecordScore() { - return initialRecordScore; - } - - void setInitialRecordScore(double initialRecordScore) { - this.initialRecordScore = initialRecordScore; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public double getMultiBucketImpact() { - return multiBucketImpact; - } - - void setMultiBucketImpact(double value) { - multiBucketImpact = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } - - void 
setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List<Double> getTypical() { - return typical; - } - - void setTypical(List<Double> typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List<Double> getActual() { - return actual; - } - - void setActual(List<Double> actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List<AnomalyCause> getCauses() { - return causes; - } - - void setCauses(List<AnomalyCause> causes) { - this.causes = Collections.unmodifiableList(causes); - } - - public List<Influence> getInfluencers() { - return influences; - } - - void setInfluencers(List<Influence> influencers) { - this.influences = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - detectorIndex, - bucketSpan, - probability, - multiBucketImpact, - recordScore, - initialRecordScore, - typical, - actual, - function, - functionDescription, - fieldName, - byFieldName, - byFieldValue, - correlatedByFieldValue, - partitionFieldName, - partitionFieldValue, - overFieldName, - overFieldValue, - timestamp, - isInterim, - causes, - influences, - jobId - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyRecord that = (AnomalyRecord) other; - - return Objects.equals(this.jobId, that.jobId) - && this.detectorIndex == that.detectorIndex - && this.bucketSpan == that.bucketSpan - && this.probability == that.probability - && Objects.equals(this.multiBucketImpact, that.multiBucketImpact) - && this.recordScore == that.recordScore - && this.initialRecordScore == that.initialRecordScore - && Objects.deepEquals(this.typical, that.typical) - && Objects.deepEquals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - &&
Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.causes, that.causes) - && Objects.equals(this.influences, that.influences); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java deleted file mode 100644 index 8d74effaac390..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Bucket Result POJO - */ -public class Bucket implements ToXContentObject { - - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField EVENT_COUNT = new ParseField("event_count"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - public static final ConstructingObjectParser<Bucket, Void> PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareBoolean(Bucket::setInterim, Result.IS_INTERIM); - PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT); - PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS); - PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER,
BUCKET_INFLUENCERS); - PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS); - PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE); - PARSER.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private double anomalyScore; - private double initialAnomalyScore; - private List<AnomalyRecord> records = new ArrayList<>(); - private long eventCount; - private boolean isInterim; - private List<BucketInfluencer> bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to - private long processingTimeMs; - private List<String> scheduledEvents = Collections.emptyList(); - - Bucket(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - if (records.isEmpty() == false) { - builder.field(RECORDS.getPreferredName(), records); - } - builder.field(EVENT_COUNT.getPreferredName(), eventCount); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers); - builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs); - if (scheduledEvents.isEmpty() == false) { - builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents); - } - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double initialAnomalyScore) { - this.initialAnomalyScore = initialAnomalyScore; - } - - /** - * Get all the anomaly records associated with this bucket. - * The records are not part of the bucket document. They will - * only be present when the bucket was retrieved and expanded - * to contain the associated records. - * - * @return the anomaly records for the bucket IF the bucket was expanded. - */ - public List<AnomalyRecord> getRecords() { - return records; - } - - void setRecords(List<AnomalyRecord> records) { - this.records = Collections.unmodifiableList(records); - } - - /** - * The number of records (events) actually processed in this bucket.
- */ - public long getEventCount() { - return eventCount; - } - - void setEventCount(long value) { - eventCount = value; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public long getProcessingTimeMs() { - return processingTimeMs; - } - - void setProcessingTimeMs(long timeMs) { - processingTimeMs = timeMs; - } - - public List<BucketInfluencer> getBucketInfluencers() { - return bucketInfluencers; - } - - void setBucketInfluencers(List<BucketInfluencer> bucketInfluencers) { - this.bucketInfluencers = Collections.unmodifiableList(bucketInfluencers); - } - - public List<String> getScheduledEvents() { - return scheduledEvents; - } - - void setScheduledEvents(List<String> scheduledEvents) { - this.scheduledEvents = Collections.unmodifiableList(scheduledEvents); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - eventCount, - initialAnomalyScore, - anomalyScore, - records, - isInterim, - bucketSpan, - bucketInfluencers, - processingTimeMs, - scheduledEvents - ); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Bucket that = (Bucket) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && (this.eventCount == that.eventCount) - && (this.bucketSpan == that.bucketSpan) - && (this.anomalyScore == that.anomalyScore) - && (this.initialAnomalyScore == that.initialAnomalyScore) - && Objects.equals(this.records, that.records) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.bucketInfluencers, that.bucketInfluencers) - && (this.processingTimeMs == that.processingTimeMs) - && Objects.equals(this.scheduledEvents, that.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java deleted file mode 100644 index 62df14ce4e817..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class BucketInfluencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket_influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field names - */ - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score"); - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - public static final ConstructingObjectParser<BucketInfluencer, Void> PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME); - PARSER.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setProbability, PROBABILITY); - PARSER.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM); - } - - private final String jobId; - private String influenceField; - private double initialAnomalyScore; - private double anomalyScore; - private double rawAnomalyScore; - private double probability; - private boolean isInterim; - private final Date timestamp; - private final long bucketSpan; - - BucketInfluencer(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - if (influenceField != null) { - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - } - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(RAW_ANOMALY_SCORE.getPreferredName(), rawAnomalyScore); - builder.field(PROBABILITY.getPreferredName(),
probability); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - void setInfluencerFieldName(String fieldName) { - this.influenceField = fieldName; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double influenceScore) { - this.initialAnomalyScore = influenceScore; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double score) { - anomalyScore = score; - } - - public double getRawAnomalyScore() { - return rawAnomalyScore; - } - - void setRawAnomalyScore(double score) { - rawAnomalyScore = score; - } - - void setIsInterim(boolean isInterim) { - this.isInterim = isInterim; - } - - public boolean isInterim() { - return isInterim; - } - - public Date getTimestamp() { - return timestamp; - } - - @Override - public int hashCode() { - return Objects.hash( - influenceField, - initialAnomalyScore, - anomalyScore, - rawAnomalyScore, - probability, - isInterim, - timestamp, - jobId, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - BucketInfluencer other = (BucketInfluencer) obj; - - return Objects.equals(influenceField, other.influenceField) - && Double.compare(initialAnomalyScore, other.initialAnomalyScore) == 0 - && Double.compare(anomalyScore, other.anomalyScore) == 0 - && Double.compare(rawAnomalyScore, other.rawAnomalyScore) == 0 - && Double.compare(probability, other.probability) == 0 - && Objects.equals(isInterim, other.isInterim) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(jobId, other.jobId) - && bucketSpan == other.bucketSpan; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java deleted file mode 100644 index 4b204d7279c38..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.TreeSet; - -public class CategoryDefinition implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("category_definition"); - - public static final ParseField CATEGORY_ID = new ParseField("category_id"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField TERMS = new ParseField("terms"); - public static final ParseField REGEX = new ParseField("regex"); - public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length"); - public static final ParseField EXAMPLES = new ParseField("examples"); - public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); - public static final ParseField NUM_MATCHES = new ParseField("num_matches"); - public static final ParseField PREFERRED_TO_CATEGORIES = new ParseField("preferred_to_categories"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("categories"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - a -> new CategoryDefinition((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID); - PARSER.declareString(CategoryDefinition::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(CategoryDefinition::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(CategoryDefinition::setTerms, TERMS); - PARSER.declareString(CategoryDefinition::setRegex, REGEX); - PARSER.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH); - PARSER.declareStringArray(CategoryDefinition::setExamples, EXAMPLES); - PARSER.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN); - PARSER.declareLong(CategoryDefinition::setNumMatches, NUM_MATCHES); - PARSER.declareLongArray(CategoryDefinition::setPreferredToCategories, PREFERRED_TO_CATEGORIES); - } - - private final String jobId; - private long categoryId = 0L; - private String partitionFieldName; - private String partitionFieldValue; - private String terms = ""; - private String regex = ""; - private long maxMatchingLength = 0L; - private final Set examples = new TreeSet<>(); - private String grokPattern; - private long numMatches = 0L; - private List preferredToCategories; - - CategoryDefinition(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public long getCategoryId() { - return categoryId; - } - - void setCategoryId(long categoryId) { - this.categoryId = categoryId; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - public void setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - } - - public String getPartitionFieldValue() { - return 
partitionFieldValue; - } - - public void setPartitionFieldValue(String partitionFieldValue) { - this.partitionFieldValue = partitionFieldValue; - } - - public String getTerms() { - return terms; - } - - void setTerms(String terms) { - this.terms = terms; - } - - public String getRegex() { - return regex; - } - - void setRegex(String regex) { - this.regex = regex; - } - - public long getMaxMatchingLength() { - return maxMatchingLength; - } - - void setMaxMatchingLength(long maxMatchingLength) { - this.maxMatchingLength = maxMatchingLength; - } - - public List getExamples() { - return new ArrayList<>(examples); - } - - void setExamples(Collection examples) { - this.examples.clear(); - this.examples.addAll(examples); - } - - void addExample(String example) { - examples.add(example); - } - - public String getGrokPattern() { - return grokPattern; - } - - void setGrokPattern(String grokPattern) { - this.grokPattern = grokPattern; - } - - public long getNumMatches() { - return numMatches; - } - - public void setNumMatches(long numMatches) { - this.numMatches = numMatches; - } - - public List getPreferredToCategories() { - return preferredToCategories; - } - - public void setPreferredToCategories(List preferredToCategories) { - this.preferredToCategories = Collections.unmodifiableList(preferredToCategories); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(CATEGORY_ID.getPreferredName(), categoryId); - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - builder.field(TERMS.getPreferredName(), terms); - builder.field(REGEX.getPreferredName(), regex); - builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); - builder.field(EXAMPLES.getPreferredName(), examples); - if (grokPattern != null) { - builder.field(GROK_PATTERN.getPreferredName(), grokPattern); - } - builder.field(NUM_MATCHES.getPreferredName(), numMatches); - if (preferredToCategories != null && (preferredToCategories.isEmpty() == false)) { - builder.field(PREFERRED_TO_CATEGORIES.getPreferredName(), preferredToCategories); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - CategoryDefinition that = (CategoryDefinition) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.categoryId, that.categoryId) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.regex, that.regex) - && Objects.equals(this.maxMatchingLength, that.maxMatchingLength) - && Objects.equals(this.examples, that.examples) - && Objects.equals(this.preferredToCategories, that.preferredToCategories) - && Objects.equals(this.numMatches, that.numMatches) - && Objects.equals(this.grokPattern, that.grokPattern); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - categoryId, - partitionFieldName, - partitionFieldValue, - terms, - regex, - maxMatchingLength, - examples, - preferredToCategories, - numMatches, - grokPattern - ); - } -} diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java deleted file mode 100644 index 0969b5983c75e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Influence field name and list of influence field values/score pairs - */ -public class Influence implements ToXContentObject { - - /** - * Note all X-Content serialized field names are "influencer" not "influence" - */ - public static final ParseField INFLUENCER = new ParseField("influencer"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - INFLUENCER.getPreferredName(), - true, - a -> new Influence((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES); - } - - private String field; - private List fieldValues; - - Influence(String field, List fieldValues) { - this.field = field; - this.fieldValues = Collections.unmodifiableList(fieldValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), field); - builder.field(INFLUENCER_FIELD_VALUES.getPreferredName(), fieldValues); - builder.endObject(); - return builder; - } - - public String getInfluencerFieldName() { - return field; - } - - public List getInfluencerFieldValues() { - return fieldValues; - } - - @Override - public int hashCode() { - return Objects.hash(field, fieldValues); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influence other = (Influence) obj; - return Objects.equals(field, other.field) && Objects.equals(fieldValues, other.fieldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java deleted file mode 100644 index 46c7516b9853a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class Influencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /* - * Field names - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value"); - public static final ParseField INITIAL_INFLUENCER_SCORE = new ParseField("initial_influencer_score"); - public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("influencers"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(Influencer::setProbability, PROBABILITY); - PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE); - PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE); - PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private String influenceField; - private String influenceValue; - private double probability; - private double initialInfluencerScore; - private double influencerScore; - private boolean isInterim; - - Influencer(String jobId, String fieldName, String fieldValue, Date timestamp, long bucketSpan) { - this.jobId = jobId; - influenceField = fieldName; - influenceValue = fieldValue; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue); - builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore); - builder.field(INITIAL_INFLUENCER_SCORE.getPreferredName(), initialInfluencerScore); - builder.field(PROBABILITY.getPreferredName(), probability); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - public String getInfluencerFieldValue() { - return influenceValue; - } - - public double getInitialInfluencerScore() { - return initialInfluencerScore; - } - - void setInitialInfluencerScore(double score) { - initialInfluencerScore = score; - } - - public double getInfluencerScore() { - return influencerScore; - } - - void setInfluencerScore(double score) { - influencerScore = score; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean value) { - isInterim = value; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - influenceField, - influenceValue, - initialInfluencerScore, - influencerScore, - probability, - isInterim, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influencer other = (Influencer) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(influenceField, other.influenceField) - && Objects.equals(influenceValue, other.influenceValue) - && Double.compare(initialInfluencerScore, other.initialInfluencerScore) == 0 - && Double.compare(influencerScore, other.influencerScore) == 0 - && Double.compare(probability, other.probability) == 0 - && (isInterim == other.isInterim) - && (bucketSpan == other.bucketSpan); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java deleted file mode 100644 index 9a6bb40682e6f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Overall Bucket Result POJO - */ -public class OverallBucket implements ToXContentObject { - - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField JOBS = new ParseField("jobs"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("overall_buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "overall_bucket"; - - public static final ConstructingObjectParser<OverallBucket, Void> PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM); - PARSER.declareObjectArray(OverallBucket::setJobs, JobInfo.PARSER, JOBS); - } - - private final Date timestamp; - private final long bucketSpan; - private final double overallScore; - private final boolean isInterim; - private List<JobInfo> jobs = Collections.emptyList(); - - OverallBucket(Date timestamp, long bucketSpan, double overallScore, boolean isInterim) { - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - this.overallScore = overallScore; - this.isInterim = isInterim; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - builder.field(JOBS.getPreferredName(), jobs); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucket span expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getOverallScore() { - return overallScore; - } - - public List<JobInfo> getJobs() { - return jobs; - } - - void setJobs(List<JobInfo> jobs) { - this.jobs = Collections.unmodifiableList(jobs); - } - - public boolean isInterim() { - return isInterim; - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, bucketSpan, overallScore, jobs, isInterim); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object
other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OverallBucket that = (OverallBucket) other; - - return Objects.equals(this.timestamp, that.timestamp) - && this.bucketSpan == that.bucketSpan - && this.overallScore == that.overallScore - && Objects.equals(this.jobs, that.jobs) - && this.isInterim == that.isInterim; - } - - public static class JobInfo implements ToXContentObject, Comparable<JobInfo> { - - private static final ParseField MAX_ANOMALY_SCORE = new ParseField("max_anomaly_score"); - - public static final ConstructingObjectParser<JobInfo, Void> PARSER = new ConstructingObjectParser<>( - "job_info", - true, - a -> new JobInfo((String) a[0], (double) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE); - } - - private final String jobId; - private final double maxAnomalyScore; - - JobInfo(String jobId, double maxAnomalyScore) { - this.jobId = Objects.requireNonNull(jobId); - this.maxAnomalyScore = maxAnomalyScore; - } - - public String getJobId() { - return jobId; - } - - public double getMaxAnomalyScore() { - return maxAnomalyScore; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, maxAnomalyScore); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - JobInfo that = (JobInfo) other; - return Objects.equals(this.jobId, that.jobId) && this.maxAnomalyScore == that.maxAnomalyScore; - } - - @Override - public int compareTo(JobInfo other) { - int result = this.jobId.compareTo(other.jobId); - if (result == 0) { - result = Double.compare(this.maxAnomalyScore, other.maxAnomalyScore); - } - return result; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java deleted file mode 100644 index 6f5408bb2ae0a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ParseField; - -/** - * Contains common attributes for results.
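The handwritten JobInfo.compareTo above sorts primarily by job ID and breaks ties on max anomaly score. The same ordering can be expressed with a Comparator chain; a small self-contained sketch using a hypothetical stand-in record (JobScore is not a real class in this codebase):

    import java.util.Comparator;
    import java.util.List;

    public class JobInfoOrderingDemo {
        // Hypothetical stand-in for OverallBucket.JobInfo, which is removed above.
        record JobScore(String jobId, double maxAnomalyScore) {}

        public static void main(String[] args) {
            // Equivalent to JobInfo.compareTo: jobId first, then maxAnomalyScore.
            Comparator<JobScore> byIdThenScore = Comparator.comparing(JobScore::jobId)
                .thenComparingDouble(JobScore::maxAnomalyScore);

            List<JobScore> sorted = List.of(
                new JobScore("job-b", 10.0),
                new JobScore("job-a", 90.0),
                new JobScore("job-a", 15.0)
            ).stream().sorted(byIdThenScore).toList();

            // Order: job-a/15.0, job-a/90.0, job-b/10.0
            sorted.forEach(System.out::println);
        }
    }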
- */ -public final class Result { - - /** - * Serialisation fields - */ - public static final ParseField RESULT_TYPE = new ParseField("result_type"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField IS_INTERIM = new ParseField("is_interim"); - - private Result() {} -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java deleted file mode 100644 index 796cb18f3eb2d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * A class to hold statistics about forecasts. - */ -public class ForecastStats implements ToXContentObject { - - public static final ParseField TOTAL = new ParseField("total"); - public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs"); - public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField STATUS = new ParseField("status"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_stats", - true, - (a) -> { - int i = 0; - long total = (long) a[i++]; - SimpleStats memoryStats = (SimpleStats) a[i++]; - SimpleStats recordStats = (SimpleStats) a[i++]; - SimpleStats runtimeStats = (SimpleStats) a[i++]; - Map statusCounts = (Map) a[i]; - return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts); - } - ); - - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - Map counts = new HashMap<>(); - p.map().forEach((key, value) -> counts.put(key, ((Number) value).longValue())); - return counts; - }, STATUS, ObjectParser.ValueType.OBJECT); - } - - private final long total; - private final long forecastedJobs; - private SimpleStats memoryStats; - private SimpleStats recordStats; - private SimpleStats runtimeStats; - private Map statusCounts; - - public ForecastStats( - long total, - SimpleStats memoryStats, - 
SimpleStats recordStats, - SimpleStats runtimeStats, - Map statusCounts - ) { - this.total = total; - this.forecastedJobs = total > 0 ? 1 : 0; - if (total > 0) { - this.memoryStats = Objects.requireNonNull(memoryStats); - this.recordStats = Objects.requireNonNull(recordStats); - this.runtimeStats = Objects.requireNonNull(runtimeStats); - this.statusCounts = Collections.unmodifiableMap(statusCounts); - } - } - - /** - * The number of forecasts currently available for this model. - */ - public long getTotal() { - return total; - } - - /** - * The number of jobs that have at least one forecast. - */ - public long getForecastedJobs() { - return forecastedJobs; - } - - /** - * Statistics about the memory usage: minimum, maximum, average and total. - */ - public SimpleStats getMemoryStats() { - return memoryStats; - } - - /** - * Statistics about the number of forecast records: minimum, maximum, average and total. - */ - public SimpleStats getRecordStats() { - return recordStats; - } - - /** - * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total - */ - public SimpleStats getRuntimeStats() { - return runtimeStats; - } - - /** - * Counts per forecast status, for example: {"finished" : 2}. - */ - public Map getStatusCounts() { - return statusCounts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TOTAL.getPreferredName(), total); - builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs); - - if (total > 0) { - builder.field(MEMORY_BYTES.getPreferredName(), memoryStats); - builder.field(RECORDS.getPreferredName(), recordStats); - builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats); - builder.field(STATUS.getPreferredName(), statusCounts); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ForecastStats other = (ForecastStats) obj; - return Objects.equals(total, other.total) - && Objects.equals(forecastedJobs, other.forecastedJobs) - && Objects.equals(memoryStats, other.memoryStats) - && Objects.equals(recordStats, other.recordStats) - && Objects.equals(runtimeStats, other.runtimeStats) - && Objects.equals(statusCounts, other.statusCounts); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java deleted file mode 100644 index abf2a278ba763..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.config.JobState; -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.client.ml.job.process.ModelSizeStats; -import org.elasticsearch.client.ml.job.process.TimingStats; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Class containing the statistics for a Machine Learning job. - * - */ -public class JobStats implements ToXContentObject { - - private static final ParseField DATA_COUNTS = new ParseField("data_counts"); - private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats"); - private static final ParseField TIMING_STATS = new ParseField("timing_stats"); - private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats"); - private static final ParseField STATE = new ParseField("state"); - private static final ParseField NODE = new ParseField("node"); - private static final ParseField OPEN_TIME = new ParseField("open_time"); - private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("job_stats", true, (a) -> { - int i = 0; - String jobId = (String) a[i++]; - DataCounts dataCounts = (DataCounts) a[i++]; - JobState jobState = (JobState) a[i++]; - ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++]; - ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? 
null : modelSizeStatsBuilder.build(); - TimingStats timingStats = (TimingStats) a[i++]; - ForecastStats forecastStats = (ForecastStats) a[i++]; - NodeAttributes node = (NodeAttributes) a[i++]; - String assignmentExplanation = (String) a[i++]; - TimeValue openTime = (TimeValue) a[i]; - return new JobStats(jobId, dataCounts, jobState, modelSizeStats, timingStats, forecastStats, node, assignmentExplanation, openTime); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> JobState.fromString(p.text()), - STATE, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()), - OPEN_TIME, - ObjectParser.ValueType.STRING_OR_NULL - ); - } - - private final String jobId; - private final DataCounts dataCounts; - private final JobState state; - private final ModelSizeStats modelSizeStats; - private final TimingStats timingStats; - private final ForecastStats forecastStats; - private final NodeAttributes node; - private final String assignmentExplanation; - private final TimeValue openTime; - - JobStats( - String jobId, - DataCounts dataCounts, - JobState state, - @Nullable ModelSizeStats modelSizeStats, - @Nullable TimingStats timingStats, - @Nullable ForecastStats forecastStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable TimeValue openTime - ) { - this.jobId = Objects.requireNonNull(jobId); - this.dataCounts = Objects.requireNonNull(dataCounts); - this.state = Objects.requireNonNull(state); - this.modelSizeStats = modelSizeStats; - this.timingStats = timingStats; - this.forecastStats = forecastStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.openTime = openTime; - } - - /** - * The jobId referencing the job for these statistics - */ - public String getJobId() { - return jobId; - } - - /** - * An object that describes the number of records processed and any related error counts - * See {@link DataCounts} - */ - public DataCounts getDataCounts() { - return dataCounts; - } - - /** - * An object that provides information about the size and contents of the model. - * See {@link ModelSizeStats} - */ - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - /** - * An object that provides statistical information about forecasts of this job. 
- * See {@link ForecastStats} - */ - public ForecastStats getForecastStats() { - return forecastStats; - } - - /** - * The status of the job - * See {@link JobState} - */ - public JobState getState() { - return state; - } - - /** - * For open jobs only, contains information about the node where the job runs - * See {@link NodeAttributes} - */ - public NodeAttributes getNode() { - return node; - } - - /** - * For open jobs only, contains messages relating to the selection of a node to run the job. - */ - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - /** - * For open jobs only, the elapsed time for which the job has been open - */ - public TimeValue getOpenTime() { - return openTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(DATA_COUNTS.getPreferredName(), dataCounts); - builder.field(STATE.getPreferredName(), state.toString()); - if (modelSizeStats != null) { - builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - if (forecastStats != null) { - builder.field(FORECASTS_STATS.getPreferredName(), forecastStats); - } - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (openTime != null) { - builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep()); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, dataCounts, modelSizeStats, timingStats, forecastStats, state, node, assignmentExplanation, openTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - JobStats other = (JobStats) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(this.dataCounts, other.dataCounts) - && Objects.equals(this.modelSizeStats, other.modelSizeStats) - && Objects.equals(this.timingStats, other.timingStats) - && Objects.equals(this.forecastStats, other.forecastStats) - && Objects.equals(this.state, other.state) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.openTime, other.openTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java deleted file mode 100644 index 01050d93b1a91..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Helper class for min, max, avg and total statistics for a quantity - */ -public class SimpleStats implements ToXContentObject { - - public static final ParseField MIN = new ParseField("min"); - public static final ParseField MAX = new ParseField("max"); - public static final ParseField AVG = new ParseField("avg"); - public static final ParseField TOTAL = new ParseField("total"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("simple_stats", true, (a) -> { - int i = 0; - double total = (double) a[i++]; - double min = (double) a[i++]; - double max = (double) a[i++]; - double avg = (double) a[i++]; - return new SimpleStats(total, min, max, avg); - }); - - static { - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG); - } - - private final double total; - private final double min; - private final double max; - private final double avg; - - SimpleStats(double total, double min, double max, double avg) { - this.total = total; - this.min = min; - this.max = max; - this.avg = avg; - } - - public double getMin() { - return min; - } - - public double getMax() { - return max; - } - - public double getAvg() { - return avg; - } - - public double getTotal() { - return total; - } - - @Override - public int hashCode() { - return Objects.hash(total, min, max, avg); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - SimpleStats other = (SimpleStats) obj; - return Objects.equals(total, other.total) - && Objects.equals(min, other.min) - && Objects.equals(avg, other.avg) - && Objects.equals(max, other.max); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MIN.getPreferredName(), min); - builder.field(MAX.getPreferredName(), max); - builder.field(AVG.getPreferredName(), avg); - builder.field(TOTAL.getPreferredName(), total); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java deleted file mode 100644 index ae13a77abc456..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.CharArrays; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Objects; - -/** - * Request object to change the password of a user of a native realm or a built-in user. - */ -public final class ChangePasswordRequest implements Validatable, ToXContentObject { - - private final String username; - private final char[] password; - private final RefreshPolicy refreshPolicy; - - /** - * @param username The username of the user whose password should be changed or null for the current user. - * @param password The new password. The password array is not cleared by the {@link ChangePasswordRequest} object so the - * calling code must clear it after receiving the response. - * @param refreshPolicy The refresh policy for the request. - */ - public ChangePasswordRequest(@Nullable String username, char[] password, RefreshPolicy refreshPolicy) { - this.username = username; - this.password = Objects.requireNonNull(password, "password is required"); - this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; - } - - public String getUsername() { - return username; - } - - public char[] getPassword() { - return password; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - byte[] charBytes = CharArrays.toUtf8Bytes(password); - try { - return builder.startObject().field("password").utf8Value(charBytes, 0, charBytes.length).endObject(); - } finally { - Arrays.fill(charBytes, (byte) 0); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java deleted file mode 100644 index cb2e3ea74582e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * A request to delete a user from the native realm. 
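ChangePasswordRequest.toXContent above encodes the password char[] into a temporary UTF-8 byte[] for serialization and zeroes that buffer in a finally block, while the original array remains the caller's responsibility. A standalone sketch of the same handling with plain JDK APIs (CharArrays.toUtf8Bytes is the Elasticsearch helper; this only approximates it):

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class ZeroSensitiveBytesDemo {
        public static void main(String[] args) {
            char[] password = "s3cret".toCharArray();

            // Encode without creating an interned String of the secret.
            ByteBuffer encoded = StandardCharsets.UTF_8.encode(CharBuffer.wrap(password));
            byte[] utf8 = new byte[encoded.remaining()];
            encoded.get(utf8);
            if (encoded.hasArray()) {
                Arrays.fill(encoded.array(), (byte) 0); // scrub the intermediate buffer
            }

            try {
                System.out.println("serializing " + utf8.length + " bytes");
            } finally {
                Arrays.fill(utf8, (byte) 0); // zero the temporary bytes once used
            }

            // The caller still owns password and clears it after the response arrives.
            Arrays.fill(password, '\0');
        }
    }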
- */ -public final class DeleteUserRequest implements Validatable { - - private final String name; - private final RefreshPolicy refreshPolicy; - - public DeleteUserRequest(String name) { - this(name, RefreshPolicy.IMMEDIATE); - } - - public DeleteUserRequest(String name, RefreshPolicy refreshPolicy) { - this.name = Objects.requireNonNull(name, "user name is required"); - this.refreshPolicy = Objects.requireNonNull(refreshPolicy, "refresh policy is required"); - } - - public String getName() { - return name; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public int hashCode() { - return Objects.hash(name, refreshPolicy); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final DeleteUserRequest other = (DeleteUserRequest) obj; - - return (refreshPolicy == other.refreshPolicy) && Objects.equals(name, other.name); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java deleted file mode 100644 index 8de14c31e3d40..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.core.AcknowledgedResponse; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -/** - * Response for a user being deleted from the native realm - */ -public final class DeleteUserResponse extends AcknowledgedResponse { - - private static final String PARSE_FIELD_NAME = "found"; - - private static final ConstructingObjectParser PARSER = AcknowledgedResponse.generateParser( - "delete_user_response", - DeleteUserResponse::new, - PARSE_FIELD_NAME - ); - - public DeleteUserResponse(boolean acknowledged) { - super(acknowledged); - } - - public static DeleteUserResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - protected String getFieldName() { - return PARSE_FIELD_NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java deleted file mode 100644 index 2b9df8d14ab73..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.security; - -/** - * Request object to disable a native realm or built-in user. - */ -public final class DisableUserRequest extends SetUserEnabledRequest { - - public DisableUserRequest(String username, RefreshPolicy refreshPolicy) { - super(false, username, refreshPolicy); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java deleted file mode 100644 index 96324909dda2b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -/** - * Request object to enable a native realm or built-in user. - */ -public final class EnableUserRequest extends SetUserEnabledRequest { - - public EnableUserRequest(String username, RefreshPolicy refreshPolicy) { - super(true, username, refreshPolicy); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java deleted file mode 100644 index 28ffa7bb10da6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.security.user.User; -import org.elasticsearch.core.CharArrays; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Objects; -import java.util.Optional; - -/** - * Request object to create or update a user in the native realm. - */ -public final class PutUserRequest implements Validatable, ToXContentObject { - - private final User user; - private final @Nullable char[] password; - private final @Nullable char[] passwordHash; - private final boolean enabled; - private final RefreshPolicy refreshPolicy; - - /** - * Create or update a user in the native realm, with the user's new or updated password specified in plaintext. - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. 
- */ - public static PutUserRequest withPassword(User user, char[] password, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, password, null, enabled, refreshPolicy); - } - - /** - * Create or update a user in the native realm, with the user's new or updated password specified as a cryptographic hash. - * @param user the user to be created or updated - * @param passwordHash the hash of the password of the user. It must be in the correct format for the password hashing algorithm in - * use on this elasticsearch cluster. The array is not modified by this class. - * It is the responsibility of the caller to clear the hash after receiving a response. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - public static PutUserRequest withPasswordHash(User user, char[] passwordHash, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, null, passwordHash, enabled, refreshPolicy); - } - - /** - * Update an existing user in the native realm without modifying their password. - * @param user the user to be created or updated - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - public static PutUserRequest updateUser(User user, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, null, null, enabled, refreshPolicy); - } - - /** - * Creates a new request that is used to create or update a user in the native realm. - * - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - * @deprecated Use {@link #withPassword(User, char[], boolean, RefreshPolicy)} or - * {@link #updateUser(User, boolean, RefreshPolicy)} instead. - */ - @Deprecated - public PutUserRequest(User user, @Nullable char[] password, boolean enabled, @Nullable RefreshPolicy refreshPolicy) { - this(user, password, null, enabled, refreshPolicy); - } - - /** - * Creates a new request that is used to create or update a user in the native realm. - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. - * @param passwordHash the hash of the password. Only one of "password" or "passwordHash" may be populated. - * The other parameter must be {@code null}. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - private PutUserRequest( - User user, - @Nullable char[] password, - @Nullable char[] passwordHash, - boolean enabled, - RefreshPolicy refreshPolicy - ) { - this.user = Objects.requireNonNull(user, "user is required, cannot be null"); - if (password != null && passwordHash != null) { - throw new IllegalArgumentException("cannot specify both password and passwordHash"); - } - this.password = password; - this.passwordHash = passwordHash; - this.enabled = enabled; - this.refreshPolicy = refreshPolicy == null ? 
RefreshPolicy.getDefault() : refreshPolicy; - } - - public User getUser() { - return user; - } - - public @Nullable char[] getPassword() { - return password; - } - - public boolean isEnabled() { - return enabled; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - final PutUserRequest that = (PutUserRequest) o; - return Objects.equals(user, that.user) - && Arrays.equals(password, that.password) - && Arrays.equals(passwordHash, that.passwordHash) - && enabled == that.enabled - && refreshPolicy == that.refreshPolicy; - } - - @Override - public int hashCode() { - int result = Objects.hash(user, enabled, refreshPolicy); - result = 31 * result + Arrays.hashCode(password); - result = 31 * result + Arrays.hashCode(passwordHash); - return result; - } - - @Override - public Optional validate() { - if (user.getMetadata() != null && user.getMetadata().keySet().stream().anyMatch(s -> s.startsWith("_"))) { - ValidationException validationException = new ValidationException(); - validationException.addValidationError("user metadata keys may not start with [_]"); - return Optional.of(validationException); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("username", user.getUsername()); - if (password != null) { - charField(builder, "password", password); - } - if (passwordHash != null) { - charField(builder, "password_hash", passwordHash); - } - builder.field("roles", user.getRoles()); - if (user.getFullName() != null) { - builder.field("full_name", user.getFullName()); - } - if (user.getEmail() != null) { - builder.field("email", user.getEmail()); - } - builder.field("metadata", user.getMetadata()); - builder.field("enabled", enabled); - return builder.endObject(); - } - - private void charField(XContentBuilder builder, String fieldName, char[] chars) throws IOException { - byte[] charBytes = CharArrays.toUtf8Bytes(chars); - try { - builder.field(fieldName).utf8Value(charBytes, 0, charBytes.length); - } finally { - Arrays.fill(charBytes, (byte) 0); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java deleted file mode 100644 index b03de98390158..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Response when adding a user to the native realm. Returns a - * single boolean field for whether the user was created or updated. 
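// A sketch of the three factory methods on the PutUserRequest class removed above, assuming the 7.x
// SecurityClient#putUser entry point; the user details are illustrative. Exactly one of password or
// passwordHash may be set, and both are char[] so the caller can zero them after the call.
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.PutUserResponse;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.client.security.user.User;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class PutUserSketch {
    static boolean createUser(RestHighLevelClient client) throws IOException {
        User user = new User("jdoe", List.of("monitoring_user"));
        char[] password = "a-long-enough-password".toCharArray();
        try {
            // withPassword sends plaintext; withPasswordHash would send a pre-hashed value instead,
            // and updateUser omits credentials entirely. Metadata keys starting with "_" fail validate().
            PutUserRequest request = PutUserRequest.withPassword(user, password, true, RefreshPolicy.IMMEDIATE);
            PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT);
            return response.isCreated(); // false means an existing user was updated
        } finally {
            Arrays.fill(password, '\0'); // the request does not clear the array for the caller
        }
    }
}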
- */ -public final class PutUserResponse { - - private final boolean created; - - public PutUserResponse(boolean created) { - this.created = created; - } - - public boolean isCreated() { - return created; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutUserResponse that = (PutUserResponse) o; - return created == that.created; - } - - @Override - public int hashCode() { - return Objects.hash(created); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "put_user_response", - true, - args -> new PutUserResponse((boolean) args[0]) - ); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField("created")); - } - - public static PutUserResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java deleted file mode 100644 index 927a4dc1bb437..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Abstract request object to enable or disable a built-in or native user. - */ -public abstract class SetUserEnabledRequest implements Validatable { - - private final boolean enabled; - private final String username; - private final RefreshPolicy refreshPolicy; - - SetUserEnabledRequest(boolean enabled, String username, RefreshPolicy refreshPolicy) { - this.enabled = enabled; - this.username = Objects.requireNonNull(username, "username is required"); - this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; - } - - public boolean isEnabled() { - return enabled; - } - - public String getUsername() { - return username; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java deleted file mode 100644 index c80bc6d55ccae..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
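// A sketch of the removed EnableUserRequest/DisableUserRequest subclasses, which differ only in the
// boolean they pass up to SetUserEnabledRequest; SecurityClient#enableUser/#disableUser are assumed
// to be the matching 7.x entry points, and the username is illustrative.
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.RefreshPolicy;

import java.io.IOException;

public class SetUserEnabledSketch {
    static void setEnabled(RestHighLevelClient client, boolean enable) throws IOException {
        if (enable) {
            // Calls super(true, username, refreshPolicy); a null refresh policy falls back to the default.
            client.security().enableUser(new EnableUserRequest("jdoe", RefreshPolicy.NONE), RequestOptions.DEFAULT);
        } else {
            // Calls super(false, username, refreshPolicy).
            client.security().disableUser(new DisableUserRequest("jdoe", RefreshPolicy.NONE), RequestOptions.DEFAULT);
        }
    }
}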
- */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class CancelTasksRequest implements Validatable { - - private final List nodes = new ArrayList<>(); - private final List actions = new ArrayList<>(); - private Optional timeout = Optional.empty(); - private Optional parentTaskId = Optional.empty(); - private Optional taskId = Optional.empty(); - private Boolean waitForCompletion; - - CancelTasksRequest() {} - - void setNodes(List nodes) { - this.nodes.addAll(nodes); - } - - public List getNodes() { - return nodes; - } - - void setTimeout(TimeValue timeout) { - this.timeout = Optional.of(timeout); - } - - public Optional getTimeout() { - return timeout; - } - - void setActions(List actions) { - this.actions.addAll(actions); - } - - public List getActions() { - return actions; - } - - void setParentTaskId(TaskId parentTaskId) { - this.parentTaskId = Optional.of(parentTaskId); - } - - public Optional getParentTaskId() { - return parentTaskId; - } - - void setTaskId(TaskId taskId) { - this.taskId = Optional.of(taskId); - } - - public Optional getTaskId() { - return taskId; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - public void setWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof CancelTasksRequest) == false) return false; - CancelTasksRequest that = (CancelTasksRequest) o; - return Objects.equals(getNodes(), that.getNodes()) - && Objects.equals(getActions(), that.getActions()) - && Objects.equals(getTimeout(), that.getTimeout()) - && Objects.equals(getParentTaskId(), that.getParentTaskId()) - && Objects.equals(getTaskId(), that.getTaskId()) - && Objects.equals(waitForCompletion, that.waitForCompletion); - } - - @Override - public int hashCode() { - return Objects.hash(getNodes(), getActions(), getTimeout(), getParentTaskId(), getTaskId(), waitForCompletion); - } - - @Override - public String toString() { - return "CancelTasksRequest{" - + "nodes=" - + nodes - + ", actions=" - + actions - + ", timeout=" - + timeout - + ", parentTaskId=" - + parentTaskId - + ", taskId=" - + taskId - + ", waitForCompletion=" - + waitForCompletion - + '}'; - } - - @SuppressWarnings("HiddenField") - public static class Builder { - private Optional timeout = Optional.empty(); - private Optional taskId = Optional.empty(); - private Optional parentTaskId = Optional.empty(); - private List actionsFilter = new ArrayList<>(); - private List nodesFilter = new ArrayList<>(); - private Boolean waitForCompletion; - - public Builder withTimeout(TimeValue timeout) { - this.timeout = Optional.of(timeout); - return this; - } - - public Builder withTaskId(TaskId taskId) { - this.taskId = Optional.of(taskId); - return this; - } - - public Builder withParentTaskId(TaskId taskId) { - this.parentTaskId = Optional.of(taskId); - return this; - } - - public Builder withActionsFiltered(List actions) { - this.actionsFilter.clear(); - this.actionsFilter.addAll(actions); - return this; - } - - public Builder withNodesFiltered(List nodes) { - this.nodesFilter.clear(); - this.nodesFilter.addAll(nodes); - return this; - } - - public Builder withWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - return this; - } - - 
public CancelTasksRequest build() { - CancelTasksRequest request = new CancelTasksRequest(); - timeout.ifPresent(request::setTimeout); - taskId.ifPresent(request::setTaskId); - parentTaskId.ifPresent(request::setParentTaskId); - request.setNodes(nodesFilter); - request.setActions(actionsFilter); - if (waitForCompletion != null) { - request.setWaitForCompletion(waitForCompletion); - } - return request; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java deleted file mode 100644 index 9023a53fcfe25..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * cancel tasks response that contains - * - task failures - * - node failures - * - tasks - */ -public class CancelTasksResponse extends ListTasksResponse { - - CancelTasksResponse(List nodesInfoData, List taskFailures, List nodeFailures) { - super(nodesInfoData, taskFailures, nodeFailures); - } - - public static CancelTasksResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private static ConstructingObjectParser PARSER; - - static { - ConstructingObjectParser parser = new ConstructingObjectParser<>( - "cancel_tasks_response", - true, - constructingObjects -> { - int i = 0; - @SuppressWarnings("unchecked") - List tasksFailures = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodeFailures = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodesInfoData = (List) constructingObjects[i]; - return new CancelTasksResponse(nodesInfoData, tasksFailures, nodeFailures); - } - ); - - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> TaskOperationFailure.fromXContent(p), - new ParseField("task_failures") - ); - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - new ParseField("node_failures") - ); - parser.declareNamedObjects(optionalConstructorArg(), NodeData.PARSER, new ParseField("nodes")); - PARSER = parser; - } - - @Override - public boolean equals(Object o) { - return super.equals(o); - } - - @Override - public int hashCode() { - return super.hashCode(); - } - - @Override - public String toString() { - return "CancelTasksResponse{" - + "taskFailures=" - + taskFailures - + ", nodeFailures=" - + nodeFailures - + ", nodesInfoData=" - + nodesInfoData - + ", tasks=" - + tasks - + ", taskGroups=" - + taskGroups - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java deleted file mode 100644 index 1fc7811ac0b9b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -/** - * client side counterpart of server side - * {@link org.elasticsearch.ElasticsearchException} - * It wraps the same content but it is not throwable. - */ -public class ElasticsearchException { - - private static final String TYPE = "type"; - private static final String REASON = "reason"; - private static final String CAUSED_BY = "caused_by"; - private static final ParseField SUPPRESSED = new ParseField("suppressed"); - private static final String STACK_TRACE = "stack_trace"; - private static final String HEADER = "header"; - private static final String ROOT_CAUSE = "root_cause"; - - private String msg; - private ElasticsearchException cause; - private final Map> headers = new HashMap<>(); - private final List suppressed = new ArrayList<>(); - - ElasticsearchException(String msg) { - this.msg = msg; - this.cause = null; - } - - ElasticsearchException(String msg, ElasticsearchException cause) { - this.msg = msg; - this.cause = cause; - } - - public String getMsg() { - return msg; - } - - public ElasticsearchException getCause() { - return cause; - } - - public List getSuppressed() { - return suppressed; - } - - void addSuppressed(List suppressedExceptions) { - this.suppressed.addAll(suppressedExceptions); - } - - /** - * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not - * return the original exception type (ie NodeClosedException for example) but just wraps - * the type, the reason and the cause of the exception. It also recursively parses the - * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException} - * instances. 
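// A sketch of the fluent Builder on the CancelTasksRequest class removed further above, assuming the
// 7.x TasksClient#cancel entry point; the task id is illustrative. Unset Optionals are simply not
// applied by build(), so only the filters actually configured end up on the request.
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.tasks.CancelTasksRequest;
import org.elasticsearch.client.tasks.CancelTasksResponse;
import org.elasticsearch.client.tasks.TaskId;
import org.elasticsearch.core.TimeValue;

import java.io.IOException;
import java.util.List;

public class CancelTasksSketch {
    static CancelTasksResponse cancel(RestHighLevelClient client) throws IOException {
        CancelTasksRequest request = new CancelTasksRequest.Builder()
            .withTaskId(new TaskId("oTUltX4IQMOUUVeiohTt8A:12345"))
            .withActionsFiltered(List.of("*search*"))
            .withWaitForCompletion(true)
            .withTimeout(TimeValue.timeValueSeconds(30))
            .build();
        return client.tasks().cancel(request, RequestOptions.DEFAULT);
    }
}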
- */ - static ElasticsearchException fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - return innerFromXContent(parser, false); - } - - private static ElasticsearchException innerFromXContent(XContentParser parser, boolean parseRootCauses) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String type = null, reason = null, stack = null; - ElasticsearchException cause = null; - Map> headers = new HashMap<>(); - List rootCauses = new ArrayList<>(); - List suppressed = new ArrayList<>(); - - for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - if (token.isValue()) { - if (TYPE.equals(currentFieldName)) { - type = parser.text(); - } else if (REASON.equals(currentFieldName)) { - reason = parser.text(); - } else if (STACK_TRACE.equals(currentFieldName)) { - stack = parser.text(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (CAUSED_BY.equals(currentFieldName)) { - cause = fromXContent(parser); - } else if (HEADER.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - List values = headers.getOrDefault(currentFieldName, new ArrayList<>()); - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else if (token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else { - parser.skipChildren(); - } - } - } else if (token == XContentParser.Token.START_OBJECT) { - parser.skipChildren(); - } - headers.put(currentFieldName, values); - } - } - } else { - // Any additional metadata object added by the metadataToXContent method is ignored - // and skipped, so that the parser does not fail on unknown fields. The parser only - // support metadata key-pairs and metadata arrays of values. - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rootCauses.add(fromXContent(parser)); - } - } else if (SUPPRESSED.match(currentFieldName, parser.getDeprecationHandler())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - suppressed.add(fromXContent(parser)); - } - } else { - // Parse the array and add each item to the corresponding list of metadata. - // Arrays of objects are not supported yet and just ignored and skipped. - List values = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else { - parser.skipChildren(); - } - } - } - } - } - - ElasticsearchException e = new ElasticsearchException(buildMessage(type, reason, stack), cause); - for (Map.Entry> header : headers.entrySet()) { - e.addHeader(header.getKey(), header.getValue()); - } - - // Adds root causes as suppressed exception. This way they are not lost - // after parsing and can be retrieved using getSuppressed() method. 
- e.suppressed.addAll(rootCauses); - e.suppressed.addAll(suppressed); - - return e; - } - - void addHeader(String key, List value) { - headers.put(key, value); - - } - - public Map> getHeaders() { - return headers; - } - - static String buildMessage(String type, String reason, String stack) { - StringBuilder message = new StringBuilder("Elasticsearch exception ["); - message.append(TYPE).append('=').append(type).append(", "); - message.append(REASON).append('=').append(reason); - if (stack != null) { - message.append(", ").append(STACK_TRACE).append('=').append(stack); - } - message.append(']'); - return message.toString(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof ElasticsearchException) == false) return false; - ElasticsearchException that = (ElasticsearchException) o; - return Objects.equals(getMsg(), that.getMsg()) - && Objects.equals(getCause(), that.getCause()) - && Objects.equals(getHeaders(), that.getHeaders()) - && Objects.equals(getSuppressed(), that.getSuppressed()); - } - - @Override - public int hashCode() { - return Objects.hash(getMsg(), getCause(), getHeaders(), getSuppressed()); - } - - @Override - public String toString() { - return "ElasticsearchException{" - + "msg='" - + msg - + '\'' - + ", cause=" - + cause - + ", headers=" - + headers - + ", suppressed=" - + suppressed - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java deleted file mode 100644 index 070bec0718511..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -public class GetTaskRequest implements Validatable { - private final String nodeId; - private final long taskId; - private boolean waitForCompletion = false; - private TimeValue timeout = null; - - public GetTaskRequest(String nodeId, long taskId) { - this.nodeId = nodeId; - this.taskId = taskId; - } - - public String getNodeId() { - return nodeId; - } - - public long getTaskId() { - return taskId; - } - - /** - * Should this request wait for all found tasks to complete? - */ - public boolean getWaitForCompletion() { - return waitForCompletion; - } - - /** - * Should this request wait for all found tasks to complete? - */ - public GetTaskRequest setWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - return this; - } - - /** - * Timeout to wait for any async actions this request must take. It must take anywhere from 0 to 2. - */ - public TimeValue getTimeout() { - return timeout; - } - - /** - * Timeout to wait for any async actions this request must take. 
- */ - public GetTaskRequest setTimeout(TimeValue timeout) { - this.timeout = timeout; - return this; - } - - @Override - public Optional validate() { - final ValidationException validationException = new ValidationException(); - if (timeout != null && waitForCompletion == false) { - validationException.addValidationError("Timeout settings are only accepted if waitForCompletion is also set"); - } - if (validationException.validationErrors().isEmpty()) { - return Optional.empty(); - } - return Optional.of(validationException); - } - - @Override - public int hashCode() { - return Objects.hash(nodeId, taskId, waitForCompletion, timeout); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetTaskRequest other = (GetTaskRequest) obj; - return Objects.equals(nodeId, other.nodeId) - && taskId == other.taskId - && waitForCompletion == other.waitForCompletion - && Objects.equals(timeout, other.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java deleted file mode 100644 index 64159b199488d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetTaskResponse { - private final boolean completed; - private final TaskInfo taskInfo; - public static final ParseField COMPLETED = new ParseField("completed"); - public static final ParseField TASK = new ParseField("task"); - - public GetTaskResponse(boolean completed, TaskInfo taskInfo) { - this.completed = completed; - this.taskInfo = taskInfo; - } - - public boolean isCompleted() { - return completed; - } - - public TaskInfo getTaskInfo() { - return taskInfo; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_task", - true, - a -> new GetTaskResponse((boolean) a[0], (TaskInfo) a[1]) - ); - static { - PARSER.declareBoolean(constructorArg(), COMPLETED); - PARSER.declareObject(constructorArg(), (p, c) -> TaskInfo.fromXContent(p), TASK); - } - - public static GetTaskResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java deleted file mode 100644 index 151549097a989..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
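// A sketch of the GetTaskRequest/GetTaskResponse pair removed above, assuming the 7.x TasksClient#get
// entry point, which wraps the response in an Optional that is empty when the task is not found; the
// node and task ids are illustrative. Note the validate() contract shown above: a timeout is only
// accepted together with waitForCompletion.
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.tasks.GetTaskRequest;
import org.elasticsearch.client.tasks.GetTaskResponse;
import org.elasticsearch.core.TimeValue;

import java.io.IOException;
import java.util.Optional;

public class GetTaskSketch {
    static boolean isDone(RestHighLevelClient client) throws IOException {
        GetTaskRequest request = new GetTaskRequest("oTUltX4IQMOUUVeiohTt8A", 12345L)
            .setWaitForCompletion(true) // without this, setTimeout(...) fails validation
            .setTimeout(TimeValue.timeValueSeconds(30));
        Optional<GetTaskResponse> response = client.tasks().get(request, RequestOptions.DEFAULT);
        return response.map(GetTaskResponse::isCompleted).orElse(false);
    }
}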
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static java.util.stream.Collectors.groupingBy; -import static java.util.stream.Collectors.toList; - -public class ListTasksResponse { - - protected final List taskFailures = new ArrayList<>(); - protected final List nodeFailures = new ArrayList<>(); - protected final List nodesInfoData = new ArrayList<>(); - protected final List tasks = new ArrayList<>(); - protected final List taskGroups = new ArrayList<>(); - - ListTasksResponse(List nodesInfoData, List taskFailures, List nodeFailures) { - if (taskFailures != null) { - this.taskFailures.addAll(taskFailures); - } - if (nodeFailures != null) { - this.nodeFailures.addAll(nodeFailures); - } - if (nodesInfoData != null) { - this.nodesInfoData.addAll(nodesInfoData); - } - this.tasks.addAll(this.nodesInfoData.stream().flatMap(nodeData -> nodeData.getTasks().stream()).collect(toList())); - this.taskGroups.addAll(buildTaskGroups()); - } - - private List buildTaskGroups() { - Map taskIdToBuilderMap = new HashMap<>(); - List topLevelTasks = new ArrayList<>(); - // First populate all tasks - for (TaskInfo taskInfo : this.tasks) { - taskIdToBuilderMap.put(taskInfo.getTaskId(), TaskGroup.builder(taskInfo)); - } - - // Now go through all task group builders and add children to their parents - for (TaskGroup.Builder taskGroup : taskIdToBuilderMap.values()) { - TaskId parentTaskId = taskGroup.getTaskInfo().getParentTaskId(); - if (parentTaskId != null) { - TaskGroup.Builder parentTask = taskIdToBuilderMap.get(parentTaskId); - if (parentTask != null) { - // we found parent in the list of tasks - add it to the parent list - parentTask.addGroup(taskGroup); - } else { - // we got zombie or the parent was filtered out - add it to the top task list - topLevelTasks.add(taskGroup); - } - } else { - // top level task - add it to the top task list - topLevelTasks.add(taskGroup); - } - } - return topLevelTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toUnmodifiableList()); - } - - public List getTasks() { - return tasks; - } - - public Map> getPerNodeTasks() { - return getTasks().stream().collect(groupingBy(TaskInfo::getNodeId)); - } - - public List getTaskFailures() { - return taskFailures; - } - - public List getNodeFailures() { - return nodeFailures; - } - - public List getTaskGroups() { - return taskGroups; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof ListTasksResponse) == false) return false; - ListTasksResponse response = (ListTasksResponse) o; - return nodesInfoData.equals(response.nodesInfoData) - && Objects.equals(getTaskFailures(), response.getTaskFailures()) - && Objects.equals(getNodeFailures(), response.getNodeFailures()) - && Objects.equals(getTasks(), response.getTasks()) - && Objects.equals(getTaskGroups(), response.getTaskGroups()); - } - - @Override - public int hashCode() { - return Objects.hash(nodesInfoData, getTaskFailures(), getNodeFailures(), getTasks(), getTaskGroups()); - } - - @Override - public String toString() { - return "CancelTasksResponse{" - + "nodesInfoData=" - + nodesInfoData - + ", taskFailures=" - + 
taskFailures - + ", nodeFailures=" - + nodeFailures - + ", tasks=" - + tasks - + ", taskGroups=" - + taskGroups - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java deleted file mode 100644 index 1e33b7ede7e48..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -class NodeData { - - private String nodeId; - private String name; - private String transportAddress; - private String host; - private String ip; - private final List roles = new ArrayList<>(); - private final Map attributes = new HashMap<>(); - private final List tasks = new ArrayList<>(); - - NodeData(String nodeId) { - this.nodeId = nodeId; - } - - void setName(String name) { - this.name = name; - } - - public void setAttributes(Map attributes) { - if (attributes != null) { - this.attributes.putAll(attributes); - } - } - - void setTransportAddress(String transportAddress) { - this.transportAddress = transportAddress; - } - - void setHost(String host) { - this.host = host; - } - - void setIp(String ip) { - this.ip = ip; - } - - void setRoles(List roles) { - if (roles != null) { - this.roles.addAll(roles); - } - } - - public String getNodeId() { - return nodeId; - } - - public String getName() { - return name; - } - - public String getTransportAddress() { - return transportAddress; - } - - public String getHost() { - return host; - } - - public String getIp() { - return ip; - } - - public List getRoles() { - return roles; - } - - public Map getAttributes() { - return attributes; - } - - public List getTasks() { - return tasks; - } - - void setTasks(List tasks) { - if (tasks != null) { - this.tasks.addAll(tasks); - } - } - - @Override - public String toString() { - return "NodeData{" - + "nodeId='" - + nodeId - + '\'' - + ", name='" - + name - + '\'' - + ", transportAddress='" - + transportAddress - + '\'' - + ", host='" - + host - + '\'' - + ", ip='" - + ip - + '\'' - + ", roles=" - + roles - + ", attributes=" - + attributes - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof NodeData) == false) return false; - NodeData nodeData = (NodeData) o; - return Objects.equals(getNodeId(), nodeData.getNodeId()) - && Objects.equals(getName(), nodeData.getName()) - && Objects.equals(getTransportAddress(), nodeData.getTransportAddress()) - && Objects.equals(getHost(), nodeData.getHost()) - && Objects.equals(getIp(), nodeData.getIp()) - && Objects.equals(getRoles(), nodeData.getRoles()) - && Objects.equals(getAttributes(), nodeData.getAttributes()) - && Objects.equals(getTasks(), nodeData.getTasks()); - } - - @Override - public int hashCode() { - return Objects.hash(getNodeId(), getName(), 
getTransportAddress(), getHost(), getIp(), getRoles(), getAttributes(), getTasks()); - } - - public static final ObjectParser.NamedObjectParser PARSER; - - static { - ObjectParser parser = new ObjectParser<>("nodes"); - parser.declareString(NodeData::setName, new ParseField("name")); - parser.declareString(NodeData::setTransportAddress, new ParseField("transport_address")); - parser.declareString(NodeData::setHost, new ParseField("host")); - parser.declareString(NodeData::setIp, new ParseField("ip")); - parser.declareStringArray(NodeData::setRoles, new ParseField("roles")); - parser.declareField(NodeData::setAttributes, (p, c) -> p.mapStrings(), new ParseField("attributes"), ObjectParser.ValueType.OBJECT); - parser.declareNamedObjects(NodeData::setTasks, TaskInfo.PARSER, new ParseField("tasks")); - PARSER = (XContentParser p, Void v, String nodeId) -> parser.parse(p, new NodeData(nodeId), null); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java deleted file mode 100644 index a8c0e299485fc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * Client side counterpart of server side version. 
- * - * {@link org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup} - */ -public class TaskGroup { - - private final TaskInfo task; - - @Override - public String toString() { - return "TaskGroup{" + "task=" + task + ", childTasks=" + childTasks + '}'; - } - - private final List childTasks = new ArrayList<>(); - - public TaskGroup(TaskInfo task, List childTasks) { - this.task = task; - this.childTasks.addAll(childTasks); - } - - public static TaskGroup.Builder builder(TaskInfo taskInfo) { - return new TaskGroup.Builder(taskInfo); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskGroup) == false) return false; - TaskGroup taskGroup = (TaskGroup) o; - return Objects.equals(task, taskGroup.task) && Objects.equals(getChildTasks(), taskGroup.getChildTasks()); - } - - @Override - public int hashCode() { - return Objects.hash(task, getChildTasks()); - } - - public static class Builder { - private TaskInfo taskInfo; - private List childTasks; - - private Builder(TaskInfo taskInfo) { - this.taskInfo = taskInfo; - childTasks = new ArrayList<>(); - } - - public void addGroup(TaskGroup.Builder builder) { - childTasks.add(builder); - } - - public TaskInfo getTaskInfo() { - return taskInfo; - } - - public TaskGroup build() { - return new TaskGroup(taskInfo, childTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toList())); - } - } - - public TaskInfo getTaskInfo() { - return task; - } - - public List getChildTasks() { - return childTasks; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java deleted file mode 100644 index 2c91be19b3db4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * client side counterpart of server side - *
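// A sketch of walking the parent/child tree that ListTasksResponse assembles out of flat TaskInfo
// entries (buildTaskGroups above): tasks whose parent was filtered out or never seen are promoted to
// top level, so the recursion below visits every task exactly once.
import org.elasticsearch.client.tasks.ListTasksResponse;
import org.elasticsearch.client.tasks.TaskGroup;

public class TaskTreeSketch {
    static void printTree(ListTasksResponse response) {
        for (TaskGroup root : response.getTaskGroups()) {
            printGroup(root, 0);
        }
    }

    private static void printGroup(TaskGroup group, int depth) {
        System.out.println("  ".repeat(depth) + group.getTaskInfo().getAction()
            + " [" + group.getTaskInfo().getTaskId() + "]");
        for (TaskGroup child : group.getChildTasks()) {
            printGroup(child, depth + 1);
        }
    }
}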

- * {@link org.elasticsearch.tasks.TaskInfo} - */ -public class TaskInfo { - - private TaskId taskId; - private String type; - private String action; - private String description; - private long startTime; - private long runningTimeNanos; - private boolean cancellable; - private boolean cancelled; - private TaskId parentTaskId; - private final Map status = new HashMap<>(); - private final Map headers = new HashMap<>(); - - public TaskInfo(TaskId taskId) { - this.taskId = taskId; - } - - public TaskId getTaskId() { - return taskId; - } - - public String getNodeId() { - return taskId.nodeId; - } - - public String getType() { - return type; - } - - void setType(String type) { - this.type = type; - } - - public String getAction() { - return action; - } - - void setAction(String action) { - this.action = action; - } - - public String getDescription() { - return description; - } - - void setDescription(String description) { - this.description = description; - } - - public long getStartTime() { - return startTime; - } - - void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getRunningTimeNanos() { - return runningTimeNanos; - } - - void setRunningTimeNanos(long runningTimeNanos) { - this.runningTimeNanos = runningTimeNanos; - } - - public boolean isCancellable() { - return cancellable; - } - - void setCancellable(boolean cancellable) { - this.cancellable = cancellable; - } - - public boolean isCancelled() { - return cancelled; - } - - void setCancelled(boolean cancelled) { - this.cancelled = cancelled; - } - - public TaskId getParentTaskId() { - return parentTaskId; - } - - void setParentTaskId(String parentTaskId) { - this.parentTaskId = new TaskId(parentTaskId); - } - - public Map getHeaders() { - return headers; - } - - void setHeaders(Map headers) { - this.headers.putAll(headers); - } - - void setStatus(Map status) { - this.status.putAll(status); - } - - public Map getStatus() { - return status; - } - - private void noOpParse(Object s) {} - - public static final ObjectParser.NamedObjectParser PARSER; - - static { - ObjectParser parser = new ObjectParser<>("tasks", true, null); - // already provided in constructor: triggering a no-op - parser.declareString(TaskInfo::noOpParse, new ParseField("node")); - // already provided in constructor: triggering a no-op - parser.declareLong(TaskInfo::noOpParse, new ParseField("id")); - parser.declareString(TaskInfo::setType, new ParseField("type")); - parser.declareString(TaskInfo::setAction, new ParseField("action")); - parser.declareObject(TaskInfo::setStatus, (p, c) -> p.map(), new ParseField("status")); - parser.declareString(TaskInfo::setDescription, new ParseField("description")); - parser.declareLong(TaskInfo::setStartTime, new ParseField("start_time_in_millis")); - parser.declareLong(TaskInfo::setRunningTimeNanos, new ParseField("running_time_in_nanos")); - parser.declareBoolean(TaskInfo::setCancellable, new ParseField("cancellable")); - parser.declareBoolean(TaskInfo::setCancelled, new ParseField("cancelled")); - parser.declareString(TaskInfo::setParentTaskId, new ParseField("parent_task_id")); - parser.declareObject(TaskInfo::setHeaders, (p, c) -> p.mapStrings(), new ParseField("headers")); - PARSER = (XContentParser p, Void v, String name) -> parser.parse(p, new TaskInfo(new TaskId(name)), null); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskInfo) == false) return false; - TaskInfo taskInfo = (TaskInfo) o; - return getStartTime() == 
taskInfo.getStartTime() - && getRunningTimeNanos() == taskInfo.getRunningTimeNanos() - && isCancellable() == taskInfo.isCancellable() - && isCancelled() == taskInfo.isCancelled() - && Objects.equals(getTaskId(), taskInfo.getTaskId()) - && Objects.equals(getType(), taskInfo.getType()) - && Objects.equals(getAction(), taskInfo.getAction()) - && Objects.equals(getDescription(), taskInfo.getDescription()) - && Objects.equals(getParentTaskId(), taskInfo.getParentTaskId()) - && Objects.equals(status, taskInfo.status) - && Objects.equals(getHeaders(), taskInfo.getHeaders()); - } - - @Override - public int hashCode() { - return Objects.hash( - getTaskId(), - getType(), - getAction(), - getDescription(), - getStartTime(), - getRunningTimeNanos(), - isCancellable(), - isCancelled(), - getParentTaskId(), - status, - getHeaders() - ); - } - - @Override - public String toString() { - return "TaskInfo{" - + "taskId=" - + taskId - + ", type='" - + type - + '\'' - + ", action='" - + action - + '\'' - + ", description='" - + description - + '\'' - + ", startTime=" - + startTime - + ", runningTimeNanos=" - + runningTimeNanos - + ", cancellable=" - + cancellable - + ", cancelled=" - + cancelled - + ", parentTaskId=" - + parentTaskId - + ", status=" - + status - + ", headers=" - + headers - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java deleted file mode 100644 index 063fb955a8f2b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * client side counterpart of server side - * {@link org.elasticsearch.action.TaskOperationFailure} - */ -public class TaskOperationFailure { - - private final String nodeId; - private final long taskId; - private final ElasticsearchException reason; - private final String status; - - public TaskOperationFailure(String nodeId, long taskId, String status, ElasticsearchException reason) { - this.nodeId = nodeId; - this.taskId = taskId; - this.status = status; - this.reason = reason; - } - - public String getNodeId() { - return nodeId; - } - - public long getTaskId() { - return taskId; - } - - public ElasticsearchException getReason() { - return reason; - } - - public String getStatus() { - return status; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskOperationFailure) == false) return false; - TaskOperationFailure that = (TaskOperationFailure) o; - return getTaskId() == that.getTaskId() - && Objects.equals(getNodeId(), that.getNodeId()) - && Objects.equals(getReason(), that.getReason()) - && Objects.equals(getStatus(), that.getStatus()); - } - - @Override - public int hashCode() { - return Objects.hash(getNodeId(), getTaskId(), getReason(), getStatus()); - } - - @Override - public String toString() { - return "TaskOperationFailure{" - + "nodeId='" - + nodeId - + '\'' - + ", taskId=" - + taskId - + ", reason=" - + reason - + ", status='" - + status - + '\'' - + '}'; - } - - public static TaskOperationFailure fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "task_info", - true, - constructorObjects -> { - int i = 0; - String nodeId = (String) constructorObjects[i++]; - long taskId = (long) constructorObjects[i++]; - String status = (String) constructorObjects[i++]; - ElasticsearchException reason = (ElasticsearchException) constructorObjects[i]; - return new TaskOperationFailure(nodeId, taskId, status, reason); - } - ); - - static { - PARSER.declareString(constructorArg(), new ParseField("node_id")); - PARSER.declareLong(constructorArg(), new ParseField("task_id")); - PARSER.declareString(constructorArg(), new ParseField("status")); - PARSER.declareObject(constructorArg(), (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField("reason")); - } -} diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider index 9426b3d1bdde7..20b144ef1c562 100644 --- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider @@ -1,6 +1,2 @@ org.elasticsearch.client.ilm.IndexLifecycleNamedXContentProvider -org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider -org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider -org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider 
-org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider org.elasticsearch.client.transform.TransformNamedXContentProvider diff --git a/distribution/archives/integ-test-zip/build.gradle b/distribution/archives/integ-test-zip/build.gradle index f83aaf74fc2a1..e8e2908a5606a 100644 --- a/distribution/archives/integ-test-zip/build.gradle +++ b/distribution/archives/integ-test-zip/build.gradle @@ -43,7 +43,7 @@ tasks.named("integTest").configure { * 2. *One* of these tests is incompatible with that and should be skipped * when running against an external cluster. */ - if (project.providers.systemProperty("tests.rest.cluster").forUseAtConfigurationTime().isPresent()) { + if (project.providers.systemProperty("tests.rest.cluster").isPresent()) { nonInputProperties.systemProperty 'tests.logfile', testClusters.named('integTest').map(c -> c.singleNode().serverLog) } else { systemProperty 'tests.logfile', '--external--' diff --git a/distribution/build.gradle b/distribution/build.gradle index 76549a83d0b0d..8fe7cfa7e2017 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -37,7 +37,6 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) { headerLine = "name,version,url,license,sourceURL" target = new File(providers.systemProperty('csv') .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv") - .forUseAtConfigurationTime() .get() ) // explicitly add our dependency on the JDK diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index a7b0e57210366..a2fa645a63b07 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -18,7 +18,7 @@ apply plugin: 'elasticsearch.rest-resources' ext.cloudflareZlibVersion = '1.2.8' -String buildId = providers.systemProperty('build.id').forUseAtConfigurationTime().getOrNull() +String buildId = providers.systemProperty('build.id').getOrNull() boolean useLocalArtifacts = buildId != null && buildId.isBlank() == false repositories { diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index a0c7598352e6c..86715499368f0 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -273,7 +273,7 @@ ospackage { signingKeyPassphrase = project.property('signing.password') signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ? 
project.file(project.property('signing.secretKeyRingFile')) : - new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').forUseAtConfigurationTime().get()), 'secring.gpg') + new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').get()), 'secring.gpg') } // version found on oldest supported distro, centos-6 diff --git a/docs/changelog/82685.yaml b/docs/changelog/82685.yaml new file mode 100644 index 0000000000000..3ef9e7841ba6e --- /dev/null +++ b/docs/changelog/82685.yaml @@ -0,0 +1,6 @@ +pr: 82685 +summary: Discard intermediate results upon cancellation for stats endpoints +area: Stats +type: bug +issues: + - 82337 diff --git a/docs/changelog/83191.yaml b/docs/changelog/83191.yaml new file mode 100644 index 0000000000000..f8d11586ae712 --- /dev/null +++ b/docs/changelog/83191.yaml @@ -0,0 +1,5 @@ +pr: 83191 +summary: "User Profile: Add initial search profile API" +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/83205.yaml b/docs/changelog/83205.yaml new file mode 100644 index 0000000000000..5376a79ad9e19 --- /dev/null +++ b/docs/changelog/83205.yaml @@ -0,0 +1,5 @@ +pr: 83205 +summary: Add Health Indicator Plugin +area: Health +type: feature +issues: [] diff --git a/docs/changelog/83290.yaml b/docs/changelog/83290.yaml new file mode 100644 index 0000000000000..9b3bb8ef056e5 --- /dev/null +++ b/docs/changelog/83290.yaml @@ -0,0 +1,5 @@ +pr: 83290 +summary: Update YAML Rest tests to check for product header on all responses +area: Infra/REST API +type: enhancement +issues: [] diff --git a/docs/changelog/83297.yaml b/docs/changelog/83297.yaml new file mode 100644 index 0000000000000..222163679595a --- /dev/null +++ b/docs/changelog/83297.yaml @@ -0,0 +1,5 @@ +pr: 83297 +summary: GET _index_template and GET _component_template request support query parameter flat_settings +area: ILM+SLM +type: enhancement +issues: [] diff --git a/docs/changelog/83310.yaml b/docs/changelog/83310.yaml new file mode 100644 index 0000000000000..e1c001927389b --- /dev/null +++ b/docs/changelog/83310.yaml @@ -0,0 +1,5 @@ +pr: 83310 +summary: "TSDB: routingPath object type check improvement" +area: TSDB +type: enhancement +issues: [] diff --git a/docs/changelog/83325.yaml b/docs/changelog/83325.yaml new file mode 100644 index 0000000000000..2e18d8378490d --- /dev/null +++ b/docs/changelog/83325.yaml @@ -0,0 +1,5 @@ +pr: 83325 +summary: Add elastic/enterprise-search-server service account +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/83381.yaml b/docs/changelog/83381.yaml new file mode 100644 index 0000000000000..73066155ac6ba --- /dev/null +++ b/docs/changelog/83381.yaml @@ -0,0 +1,7 @@ +pr: 83381 +summary: Replace scroll cursors with point-in-time and `search_after` +area: SQL +type: enhancement +issues: + - 61873 + - 80523 diff --git a/docs/changelog/83445.yaml b/docs/changelog/83445.yaml new file mode 100644 index 0000000000000..8e6fcbfe6e2cf --- /dev/null +++ b/docs/changelog/83445.yaml @@ -0,0 +1,5 @@ +pr: 83445 +summary: Repository integrity health indicator services +area: Health +type: enhancement +issues: [] diff --git a/docs/changelog/83456.yaml b/docs/changelog/83456.yaml new file mode 100644 index 0000000000000..dc138f1a98d10 --- /dev/null +++ b/docs/changelog/83456.yaml @@ -0,0 +1,5 @@ +pr: 83456 +summary: Retry anomaly detection job recovery during relocation +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/83494.yaml b/docs/changelog/83494.yaml new file mode 100644 
index 0000000000000..e933ad666860e --- /dev/null +++ b/docs/changelog/83494.yaml @@ -0,0 +1,7 @@ +pr: 83494 +summary: Group field caps response by index mapping hash +area: Search +type: enhancement +issues: + - 78665 + - 82879 diff --git a/docs/changelog/83524.yaml b/docs/changelog/83524.yaml new file mode 100644 index 0000000000000..9dc7e8b0b487b --- /dev/null +++ b/docs/changelog/83524.yaml @@ -0,0 +1,6 @@ +pr: 83524 +summary: Tolerate empty types array in Watch definitions +area: Watcher +type: bug +issues: + - 83235 diff --git a/docs/changelog/83550.yaml b/docs/changelog/83550.yaml new file mode 100644 index 0000000000000..51ab72f642fe6 --- /dev/null +++ b/docs/changelog/83550.yaml @@ -0,0 +1,5 @@ +pr: 83550 +summary: "Script: Fields API for Dense Vector" +area: Infra/Scripting +type: enhancement +issues: [] diff --git a/docs/changelog/83570.yaml b/docs/changelog/83570.yaml new file mode 100644 index 0000000000000..80cfa41adf495 --- /dev/null +++ b/docs/changelog/83570.yaml @@ -0,0 +1,5 @@ +pr: 83570 +summary: User Profile - Update APIs to work with domain +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/83591.yaml b/docs/changelog/83591.yaml new file mode 100644 index 0000000000000..36200918bf366 --- /dev/null +++ b/docs/changelog/83591.yaml @@ -0,0 +1,5 @@ +pr: 83591 +summary: Compress Cursors +area: SQL +type: enhancement +issues: [] diff --git a/docs/changelog/83595.yaml b/docs/changelog/83595.yaml new file mode 100644 index 0000000000000..978583755249b --- /dev/null +++ b/docs/changelog/83595.yaml @@ -0,0 +1,6 @@ +pr: 83595 +summary: Completion field to support multiple completion multi-fields +area: Mapping +type: bug +issues: + - 83534 diff --git a/docs/changelog/83636.yaml b/docs/changelog/83636.yaml new file mode 100644 index 0000000000000..5ca5352e12422 --- /dev/null +++ b/docs/changelog/83636.yaml @@ -0,0 +1,6 @@ +pr: 83636 +summary: Add filtering to fieldcaps endpoint +area: Search +type: enhancement +issues: + - 82966 diff --git a/docs/changelog/83637.yaml b/docs/changelog/83637.yaml new file mode 100644 index 0000000000000..3ebd982bf7178 --- /dev/null +++ b/docs/changelog/83637.yaml @@ -0,0 +1,5 @@ +pr: 83637 +summary: "Fix: ip prefix bucket reduction" +area: Aggregations +type: bug +issues: [] diff --git a/docs/changelog/83661.yaml b/docs/changelog/83661.yaml new file mode 100644 index 0000000000000..e062bdc805e3f --- /dev/null +++ b/docs/changelog/83661.yaml @@ -0,0 +1,6 @@ +pr: 83661 +summary: Add log4j-slf4j-impl to repository-azure +area: "Packaging" +type: bug +issues: + - 83652 diff --git a/docs/changelog/83693.yaml b/docs/changelog/83693.yaml new file mode 100644 index 0000000000000..f6b64ea3b2609 --- /dev/null +++ b/docs/changelog/83693.yaml @@ -0,0 +1,6 @@ +pr: 83693 +summary: Avoid eagerly loading `StoredFieldsReader` in fetch phase +area: Search +type: bug +issues: + - 82777 diff --git a/docs/changelog/83704.yaml b/docs/changelog/83704.yaml new file mode 100644 index 0000000000000..9138548d63fdf --- /dev/null +++ b/docs/changelog/83704.yaml @@ -0,0 +1,5 @@ +pr: 83704 +summary: Speed up merging field-caps response +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/83715.yaml b/docs/changelog/83715.yaml new file mode 100644 index 0000000000000..5fab17dbe2be8 --- /dev/null +++ b/docs/changelog/83715.yaml @@ -0,0 +1,5 @@ +pr: 83715 +summary: "Fix: backward compatibility with version 7.17.0" +area: Aggregations +type: bug +issues: [] diff --git a/docs/changelog/83728.yaml b/docs/changelog/83728.yaml new file mode 
100644 index 0000000000000..c8f78a5497e1d --- /dev/null +++ b/docs/changelog/83728.yaml @@ -0,0 +1,5 @@ +pr: 83728 +summary: Security global privilege for updating profile data of applications +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/83738.yaml b/docs/changelog/83738.yaml new file mode 100644 index 0000000000000..e4d661bf08284 --- /dev/null +++ b/docs/changelog/83738.yaml @@ -0,0 +1,6 @@ +pr: 83738 +summary: Check that the utf8 length of a keyword field is not bigger than 32766 in ES, rather than in Lucene. +area: Mapping +type: enhancement +issues: + - 80865 diff --git a/docs/changelog/83760.yaml b/docs/changelog/83760.yaml new file mode 100644 index 0000000000000..f4249f9c04771 --- /dev/null +++ b/docs/changelog/83760.yaml @@ -0,0 +1,5 @@ +pr: 83760 +summary: Batch open-indices cluster state updates +area: Indices APIs +type: enhancement +issues: [] diff --git a/docs/changelog/83764.yaml b/docs/changelog/83764.yaml new file mode 100644 index 0000000000000..83d8aa024bba3 --- /dev/null +++ b/docs/changelog/83764.yaml @@ -0,0 +1,5 @@ +pr: 83764 +summary: Short circuit date patterns after first match +area: Ingest +type: bug +issues: [] diff --git a/docs/changelog/83785.yaml b/docs/changelog/83785.yaml new file mode 100644 index 0000000000000..db6795c82e93e --- /dev/null +++ b/docs/changelog/83785.yaml @@ -0,0 +1,6 @@ +pr: 83785 +summary: '[GCE Discovery] Correctly handle large zones with 500 or more instances' +area: Distributed +type: bug +issues: + - 83783 diff --git a/docs/changelog/83795.yaml b/docs/changelog/83795.yaml new file mode 100644 index 0000000000000..af5a670918a7a --- /dev/null +++ b/docs/changelog/83795.yaml @@ -0,0 +1,6 @@ +pr: 83795 +summary: Add leniency option to SQL CLI +area: SQL +type: enhancement +issues: + - 67436 diff --git a/docs/changelog/83802.yaml b/docs/changelog/83802.yaml new file mode 100644 index 0000000000000..c93e0a1c98db6 --- /dev/null +++ b/docs/changelog/83802.yaml @@ -0,0 +1,5 @@ +pr: 83802 +summary: Add ML memory stats API +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/83896.yaml b/docs/changelog/83896.yaml new file mode 100644 index 0000000000000..3344003b17a38 --- /dev/null +++ b/docs/changelog/83896.yaml @@ -0,0 +1,6 @@ +pr: 83896 +summary: Update system index mappings if `_meta` is null +area: Infra/Core +type: bug +issues: + - 83890 diff --git a/docs/changelog/83913.yaml b/docs/changelog/83913.yaml new file mode 100644 index 0000000000000..65bf661793383 --- /dev/null +++ b/docs/changelog/83913.yaml @@ -0,0 +1,6 @@ +pr: 83913 +summary: Improve BWC for persisted authentication headers +area: Authentication +type: enhancement +issues: + - 83567 diff --git a/docs/changelog/83918.yaml b/docs/changelog/83918.yaml new file mode 100644 index 0000000000000..941d0c6aea30a --- /dev/null +++ b/docs/changelog/83918.yaml @@ -0,0 +1,6 @@ +pr: 83918 +summary: Always re-run Feature migrations which have encountered errors +area: Infra/Core +type: bug +issues: + - 83917 diff --git a/docs/changelog/83920.yaml b/docs/changelog/83920.yaml new file mode 100644 index 0000000000000..95bfc2536fc84 --- /dev/null +++ b/docs/changelog/83920.yaml @@ -0,0 +1,5 @@ +pr: 83920 +summary: "TSDB: Reject the nested object fields that are configured time_series_dimension" +area: TSDB +type: enhancement +issues: [] diff --git a/docs/changelog/84000.yaml b/docs/changelog/84000.yaml new file mode 100644 index 0000000000000..b24d357834e21 --- /dev/null +++ b/docs/changelog/84000.yaml @@ -0,0 +1,5 @@ +pr: 84000 +summary:
Fix `GeoIpDownloader` startup during rolling upgrade +area: Ingest +type: bug +issues: [] diff --git a/docs/changelog/84034.yaml b/docs/changelog/84034.yaml new file mode 100644 index 0000000000000..30065232231ef --- /dev/null +++ b/docs/changelog/84034.yaml @@ -0,0 +1,5 @@ +pr: 84034 +summary: Use static empty store files metadata +area: Allocation +type: enhancement +issues: [] diff --git a/docs/changelog/84038.yaml b/docs/changelog/84038.yaml new file mode 100644 index 0000000000000..c4f07f6d3aefa --- /dev/null +++ b/docs/changelog/84038.yaml @@ -0,0 +1,6 @@ +pr: 84038 +summary: Preserve context in `ResultDeduplicator` +area: Infra/Core +type: bug +issues: + - 84036 diff --git a/docs/changelog/84051.yaml b/docs/changelog/84051.yaml new file mode 100644 index 0000000000000..726252127bccc --- /dev/null +++ b/docs/changelog/84051.yaml @@ -0,0 +1,6 @@ +pr: 84051 +summary: '`GeometryNormalizer` should not fail if it cannot compute signed area' +area: Geo +type: bug +issues: + - 83946 diff --git a/docs/changelog/84089.yaml b/docs/changelog/84089.yaml new file mode 100644 index 0000000000000..5404d331ebd3c --- /dev/null +++ b/docs/changelog/84089.yaml @@ -0,0 +1,6 @@ +pr: 84089 +summary: Preserve context in `snapshotDeletionListeners` +area: Snapshot/Restore +type: bug +issues: + - 84036 diff --git a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc index 0a21ae1fd2bdc..c1ef342100143 100644 --- a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc @@ -9,7 +9,7 @@ The following variables are available in all watcher contexts. The id of the watch. `ctx['id']` (`String`, read-only):: - The server generated unique identifer for the run watch. + The server generated unique identifier for the run watch. `ctx['metadata']` (`Map`, read-only):: Metadata can be added to the top level of the watch definition. This diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 61716e5f6a059..7355231141791 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -6,7 +6,7 @@ functionality in Elasticsearch by adding repositories backed by the cloud or by distributed file systems: [discrete] -==== Offical repository plugins +==== Official repository plugins NOTE: Support for S3, GCS and Azure repositories is now bundled in {es} by default. 
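As illustration of the repository note just above: with S3, GCS, and Azure support bundled in {es} by default, registering a cloud snapshot repository no longer requires installing a plugin first. A minimal sketch of such a request follows; the repository name `my_repository` and the bucket name `my-bucket` are illustrative placeholders, not values taken from this changeset.

[source,console]
----
PUT _snapshot/my_repository
{
  "type": "s3",
  "settings": {
    "bucket": "my-bucket"
  }
}
----

The same request shape applies to the `gcs` and `azure` repository types, with their respective repository settings.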
diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 2435f800bb6da..13855a2285249 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -137,8 +137,8 @@ PUT metrics_index/_doc/1?refresh { "network.name" : "net-1", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [3, 7, 23, 12, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [3, 7, 23, 12, 6] } } @@ -146,8 +146,8 @@ PUT metrics_index/_doc/2?refresh { "network.name" : "net-2", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [8, 17, 8, 7, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [8, 17, 8, 7, 6] } } diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index d4c3135cc576e..d7bf8b478f563 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -137,8 +137,8 @@ PUT metrics_index/_doc/1?refresh { "network.name" : "net-1", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [3, 7, 23, 12, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [3, 7, 23, 12, 6] } } @@ -146,8 +146,8 @@ PUT metrics_index/_doc/2?refresh { "network.name" : "net-2", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [8, 17, 8, 7, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [8, 17, 8, 7, 6] } } diff --git a/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc index 55b14b5d9adc2..841632124805f 100644 --- a/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc @@ -31,21 +31,29 @@ The correlation function to execute. The configuration to calculate a count correlation. This function is designed for determining the correlation of a term value and a given metric. Consequently, it needs to meet the following requirements. ++ +-- * The `buckets_path` must point to a `_count` metric. * The total count of all the `bucket_path` count values must be less than or equal to `indicator.doc_count`. * When utilizing this function, an initial calculation to gather the required `indicator` values is required. - +-- ++ .Properties of `count_correlation` [%collapsible%open] ===== `indicator`::: (Required, object) The indicator with which to correlate the configured `bucket_path` values. - ++ .Properties of `indicator` [%collapsible%open] -===== +====== +`doc_count`::: +(Required, integer) +The total number of documents that initially created the `expectations`. It's required to be greater than or equal to the sum +of all values in the `buckets_path` as this is the originating superset of data to which the term values are correlated. + `expectations`::: (Required, array) An array of numbers with which to correlate the configured `bucket_path` values. The length of this value must always equal @@ -55,12 +63,7 @@ the number of buckets returned by the `bucket_path`. (Optional, array) An array of fractions to use when averaging and calculating variance. This should be used if the pre-calculated data and the `buckets_path` have known gaps. 
The length of `fractions`, if provided, must equal `expectations`. - -`doc_count`::: -(Required, integer) -The total number of documents that initially created the `expectations`. It's required to be greater than or equal to the sum -of all values in the `buckets_path` as this is the originating superset of data to which the term values are correlated. -===== +====== ===== ==== @@ -75,8 +78,10 @@ A `bucket_correlation` aggregation looks like this in isolation: "buckets_path": "range_values>_count", <1> "function": { "count_correlation": { <2> - "expectations": [...], - "doc_count": 10000 + "indicator": { + "expectations": [...], + "doc_count": 10000 + } } } } diff --git a/docs/reference/analysis.asciidoc b/docs/reference/analysis.asciidoc index 3db1a89bbe847..72ab42d22b911 100644 --- a/docs/reference/analysis.asciidoc +++ b/docs/reference/analysis.asciidoc @@ -1,7 +1,7 @@ [[analysis]] = Text analysis -:lucene-analysis-docs: https://lucene.apache.org/core/{lucene_version_path}/analyzers-common/org/apache/lucene/analysis +:lucene-analysis-docs: https://lucene.apache.org/core/{lucene_version_path}/analysis/common/org/apache/lucene/analysis :lucene-gh-main-link: https://github.com/apache/lucene/blob/main/lucene :lucene-stop-word-link: {lucene-gh-main-link}/analysis/common/src/resources/org/apache/lucene/analysis diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 92c293795a3d2..e685c17f96865 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -366,7 +366,7 @@ The regex above is easier to understand as: [discrete] === Definition -The `pattern` anlayzer consists of: +The `pattern` analyzer consists of: Tokenizer:: * <> diff --git a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc index b90350e2bbd13..9a20131a227e7 100644 --- a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc @@ -44,7 +44,7 @@ The filter produces the following tokens. The API response contains the position and offsets of each output token. Note the `predicate_token_filter` filter does not change the tokens' original -positions or offets. +positions or offsets. .*Response* [%collapsible] diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc index a092b8a207bb2..6f305be845fc4 100644 --- a/docs/reference/cat/trainedmodel.asciidoc +++ b/docs/reference/cat/trainedmodel.asciidoc @@ -72,7 +72,7 @@ The estimated heap size to keep the trained model in memory. `id`::: (Default) -Idetifier for the trained model. +Identifier for the trained model. `ingest.count`, `ic`, `ingestCount`::: The total number of documents that are processed by the model. diff --git a/docs/reference/ccr/index.asciidoc b/docs/reference/ccr/index.asciidoc index c39b875446598..f42f0d0f51bf0 100644 --- a/docs/reference/ccr/index.asciidoc +++ b/docs/reference/ccr/index.asciidoc @@ -216,19 +216,11 @@ To manage how operations are replicated from the leader index, you can configure settings when <>. 
-The follower index automatically retrieves some updates applied to the leader -index, while other updates are retrieved as needed: - -[cols="3"] -|=== -h| Update type h| Automatic h| As needed -| Alias | {yes-icon} | {no-icon} -| Mapping | {no-icon} | {yes-icon} -| Settings | {no-icon} | {yes-icon} -|=== - -For example, changing the number of replicas on the leader index is not -replicated by the follower index, so that setting might not be retrieved. +Changes in the index mapping on the leader index are replicated to the +follower index as soon as possible. This behavior is true for index +settings as well, except for some settings that are local to the leader +index. For example, changing the number of replicas on the leader index is +not replicated by the follower index, so that setting might not be retrieved. If you apply a non-dynamic settings change to the leader index that is needed by the follower index, the follower index closes itself, applies the diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index b59b7e34689ec..8ea35d268134d 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1096,7 +1096,7 @@ Total size of all file stores across all selected nodes. `total_in_bytes`:: (integer) -Total size, in bytes, of all file stores across all seleced nodes. +Total size, in bytes, of all file stores across all selected nodes. `free`:: (<>) diff --git a/docs/reference/commands/index.asciidoc b/docs/reference/commands/index.asciidoc index 3aa564d1a4017..e55cd89bd6af4 100644 --- a/docs/reference/commands/index.asciidoc +++ b/docs/reference/commands/index.asciidoc @@ -13,6 +13,7 @@ tasks from the command line: * <> * <> * <> +* <> * <> * <> * <> @@ -28,6 +29,7 @@ include::create-enrollment-token.asciidoc[] include::croneval.asciidoc[] include::keystore.asciidoc[] include::node-tool.asciidoc[] +include::reconfigure-node.asciidoc[] include::reset-password.asciidoc[] include::saml-metadata.asciidoc[] include::service-tokens-command.asciidoc[] diff --git a/docs/reference/commands/keystore.asciidoc b/docs/reference/commands/keystore.asciidoc index c56b7dbc6a64e..a8e542cb8e52d 100644 --- a/docs/reference/commands/keystore.asciidoc +++ b/docs/reference/commands/keystore.asciidoc @@ -218,7 +218,7 @@ password. [[show-keystore-value]] ==== Show settings in the keystore -To display the value of a setting in the keystorem use the `show` command: +To display the value of a setting in the keystore use the `show` command: [source,sh] ---------------------------------------------------------------- diff --git a/docs/reference/commands/reconfigure-node.asciidoc b/docs/reference/commands/reconfigure-node.asciidoc new file mode 100644 index 0000000000000..f06aee9f94db8 --- /dev/null +++ b/docs/reference/commands/reconfigure-node.asciidoc @@ -0,0 +1,64 @@ +[[reconfigure-node]] +== elasticsearch-reconfigure-node + +The `elasticsearch-reconfigure-node` tool reconfigures an {es} node that was installed +through an RPM or DEB package to join an existing cluster with security features enabled. + +[discrete] +=== Synopsis + +[source,shell] +----------------------------------------------------- +bin/elasticsearch-reconfigure-node +[--enrollment-token] [-h, --help] [-E ] +[-s, --silent] [-v, --verbose] +----------------------------------------------------- + + +[discrete] +=== Description + +When installing {es} with a DEB or RPM package, the current node is assumed to +be the first node in the cluster. 
{es} enables and configures security +features on the node, generates a password for the `elastic` superuser, and +configures TLS for the HTTP and transport layers. + +Rather than form a single-node cluster, you can add a node to an existing +cluster where security features are already enabled and configured. Before +starting your new node, run the +<> tool +with the `-s node` option to generate an enrollment token on any node in your +existing cluster. On your new node, run the +`elasticsearch-reconfigure-node` tool and pass the enrollment token as a +parameter. + +NOTE: This tool is intended only for use on DEB or RPM distributions of {es}. + +You must run this tool with `sudo` so that it can edit the necessary +files in your {es} installation configuration directory that are owned by +`root:elasticsearch`. + + +[discrete] +[[reconfigure-node-parameters]] +=== Parameters + +`--enrollment-token`:: The enrollment token, which can be generated on any of the +nodes in an existing, secured cluster. + +`-E `:: Configures a standard {es} or {xpack} setting. + +`-h, --help`:: Shows help information. + +`-s, --silent`:: Shows minimal output. + +`-v, --verbose`:: Shows verbose output. + +[discrete] +=== Examples + +The following example reconfigures an installed {es} node so that it can join an existing cluster when it starts for the first time. +[source,shell] +---- +sudo /usr/share/elasticsearch/elasticsearch-reconfigure-node --enrollment-token eyJ2ZXIiOiI4LjAuMCIsImFkciI6WyIxOTIuMTY4LjEuMTY6OTIwMCJdLCJmZ3IiOiI4NGVhYzkyMzAyMWQ1MjcyMmQxNTFhMTQwZmM2ODI5NmE5OWNiNmU0OGVhZjYwYWMxYzljM2I3ZDJjOTg2YTk3Iiwia2V5IjoiUy0yUjFINEJrNlFTMkNEY1dVV1g6QS0wSmJxM3hTRy1haWxoQTdPWVduZyJ9 +---- diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 8a06f641d0dd2..0b8ecdb3bd9f7 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -351,14 +351,6 @@ The bulk API's response contains the individual results of each operation in the request, returned in the order submitted. The success or failure of an individual operation does not affect other operations in the request. -[[bulk-partial-responses]] -.Partial responses -**** -To ensure fast responses, the bulk API will respond with partial results if one -or more shards fail. See <> for more -information. -**** - `took`:: (integer) How long, in milliseconds, it took to process the bulk request. diff --git a/docs/reference/docs/data-replication.asciidoc b/docs/reference/docs/data-replication.asciidoc index d9cc3c6ab0224..9503b6b6bb29d 100644 --- a/docs/reference/docs/data-replication.asciidoc +++ b/docs/reference/docs/data-replication.asciidoc @@ -126,7 +126,6 @@ respond with partial results if one or more shards fail: * <> * <> -* <> * <> Responses containing partial results still provide a `200 OK` HTTP status code. diff --git a/docs/reference/eql/eql-search-api.asciidoc b/docs/reference/eql/eql-search-api.asciidoc index 29c43271f851d..93084cad2f9e3 100644 --- a/docs/reference/eql/eql-search-api.asciidoc +++ b/docs/reference/eql/eql-search-api.asciidoc @@ -109,8 +109,8 @@ Comma-separated list of filters for the API response. See <>. `ignore_unavailable`:: -(Optional, Boolean) If `true`, missing or closed indices are not included in the -response. Defaults to `true`. +(Optional, Boolean) If `false`, the request returns an error if it targets a +missing or closed index. Defaults to `true`.
`keep_alive`:: + diff --git a/docs/reference/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc index 62d50ed23ecea..6536653ae4cd6 100644 --- a/docs/reference/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -84,7 +84,7 @@ graph as vertices. For example: field::: Identifies a field in the documents of interest. include::: Identifies the terms of interest that form the starting points from which you want to spider out. You do not have to specify a seed query -if you specify an include clause. The include clause implicitly querys for +if you specify an include clause. The include clause implicitly queries for documents that contain any of the listed terms listed. In addition to specifying a simple array of strings, you can also pass objects with `term` and `boost` values to boost matches on particular terms. diff --git a/docs/reference/how-to/fix-common-cluster-issues.asciidoc b/docs/reference/how-to/fix-common-cluster-issues.asciidoc index cc778ea0d79e4..28d79f63761eb 100644 --- a/docs/reference/how-to/fix-common-cluster-issues.asciidoc +++ b/docs/reference/how-to/fix-common-cluster-issues.asciidoc @@ -692,9 +692,9 @@ see the number of active threads in each thread pool and how many tasks are queued, how many have been rejected, and how many have completed. [source,console] ---- +---- GET /_cat/thread_pool?v&s=t,n&h=type,name,node_name,active,queue,rejected,completed ---- +---- **Inspect the hot threads on each node** @@ -704,9 +704,9 @@ to determine if the thread has sufficient resources to progress and gauge how quickly it is progressing. [source,console] ---- +---- GET /_nodes/hot_threads ---- +---- **Look for long running tasks** diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 47a3622aabf1f..5c5a8977d34d4 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -192,7 +192,7 @@ While both options would return similar scores, there are trade-offs: <> provides a lot of flexibility, enabling you to combine the text relevance score with static signals as you prefer. On the other hand, the <> only -exposes a couple ways to incorporate static signails into the score. However, +exposes a couple ways to incorporate static signals into the score. However, it relies on the <> and <> fields, which index values in a special way that allows the <> to skip diff --git a/docs/reference/images/index-mgmt/management_index_details.png b/docs/reference/images/index-mgmt/management_index_details.png index 13d607b061d4a..79568818aa4ec 100644 Binary files a/docs/reference/images/index-mgmt/management_index_details.png and b/docs/reference/images/index-mgmt/management_index_details.png differ diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 2565ae5f320ff..0eee58a1a2f90 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -5,7 +5,7 @@ ++++ The `geo_shape` data type facilitates the indexing of and searching -with arbitrary geo shapes such as rectangles and polygons. It should be +with arbitrary geoshapes such as rectangles and polygons. It should be used when either the data being indexed or the queries being executed contain shapes other than just points. @@ -26,7 +26,7 @@ type. |`orientation` a|Optional. Default <> for the field's -polygons. +WKT polygons. 
This parameter sets and returns only a `RIGHT` (counterclockwise) or `LEFT` (clockwise) value. However, you can specify either value in multiple ways. @@ -66,7 +66,7 @@ and reject the whole document. [[geoshape-indexing-approach]] [discrete] ==== Indexing approach -GeoShape types are indexed by decomposing the shape into a triangular mesh and +Geoshape types are indexed by decomposing the shape into a triangular mesh and indexing each triangle as a 7 dimension point in a BKD tree. This provides near perfect spatial resolution (down to 1e-7 decimal degree precision) since all spatial relations are computed using an encoded vector representation of the @@ -144,7 +144,7 @@ API. The following is an example of a point in GeoJSON. POST /example/_doc { "location" : { - "type" : "point", + "type" : "Point", "coordinates" : [-77.03653, 38.897676] } } @@ -164,23 +164,23 @@ POST /example/_doc [[geo-linestring]] ===== http://geojson.org/geojson-spec.html#id3[LineString] -A `linestring` defined by an array of two or more positions. By -specifying only two points, the `linestring` will represent a straight +A linestring defined by an array of two or more positions. By +specifying only two points, the linestring will represent a straight line. Specifying more than two points creates an arbitrary path. The -following is an example of a LineString in GeoJSON. +following is an example of a linestring in GeoJSON. [source,console] -------------------------------------------------- POST /example/_doc { "location" : { - "type" : "linestring", + "type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]] } } -------------------------------------------------- -The following is an example of a LineString in WKT: +The following is an example of a linestring in WKT: [source,console] -------------------------------------------------- @@ -190,7 +190,7 @@ POST /example/_doc } -------------------------------------------------- -The above `linestring` would draw a straight line starting at the White +The above linestring would draw a straight line starting at the White House to the US Capitol Building. [discrete] @@ -199,14 +199,14 @@ House to the US Capitol Building. A polygon is defined by a list of a list of points. The first and last points in each (outer) list must be the same (the polygon must be -closed). The following is an example of a Polygon in GeoJSON. +closed). The following is an example of a polygon in GeoJSON. 
[source,console] -------------------------------------------------- POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ] @@ -214,7 +214,7 @@ POST /example/_doc } -------------------------------------------------- -The following is an example of a Polygon in WKT: +The following is an example of a polygon in WKT: [source,console] -------------------------------------------------- @@ -233,7 +233,7 @@ of a polygon with a hole: POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] @@ -242,7 +242,7 @@ POST /example/_doc } -------------------------------------------------- -The following is an example of a Polygon with a hole in WKT: +The following is an example of a polygon with a hole in WKT: [source,console] -------------------------------------------------- @@ -257,22 +257,29 @@ POST /example/_doc ===== Polygon orientation A polygon's orientation indicates the order of its vertices: `RIGHT` -(counterclockwise) or `LEFT` (clockwise). +(counterclockwise) or `LEFT` (clockwise). {es} uses a polygon’s orientation to +determine if it crosses the international dateline (+/-180° longitude). -You can set a default orientation for a `geo_shape` field using the -<>. You can override -this default for specific polygons using the document-level `orientation` -parameter. +You can set a default orientation for WKT polygons using the +<>. This is because +the WKT specification doesn't specify or enforce a default orientation. -For example, the following indexing request specifies a document-level -`orientation` of `LEFT`. +GeoJSON polygons use a default orientation of `RIGHT`, regardless of +`orientation` mapping parameter's value. This is because the +https://tools.ietf.org/html/rfc7946#section-3.1.6[GeoJSON specification] +mandates that an outer polygon use a counterclockwise orientation and interior +shapes use a clockwise orientation. + +You can override the default orientation for GeoJSON polygons using the +document-level `orientation` parameter. For example, the following indexing +request specifies a document-level `orientation` of `LEFT`. [source,console] ---- POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "orientation" : "LEFT", "coordinates" : [ [ [-177.0, 10.0], [176.0, 15.0], [172.0, 0.0], [176.0, -15.0], [-177.0, -10.0], [-177.0, 10.0] ] @@ -282,15 +289,15 @@ POST /example/_doc ---- {es} only uses a polygon’s orientation to determine if it crosses the -international dateline (+/-180° longitude). If the difference between a -polygon’s minimum longitude and the maximum longitude is less than 180°, the -polygon doesn't cross the dateline and its orientation has no effect. +international dateline. If the difference between a polygon’s minimum longitude +and the maximum longitude is less than 180°, the polygon doesn't cross the +dateline and its orientation has no effect. If the difference between a polygon’s minimum longitude and the maximum longitude is 180° or greater, {es} checks whether the polygon's document-level -`orientation` differs from the default in the `orientation` mapping parameter. -If the orientation differs, {es} considers the polygon to cross the -international dateline and splits the polygon at the dateline. 
+`orientation` differs from the default orientation. If the orientation differs, +{es} considers the polygon to cross the international dateline and splits the +polygon at the dateline. [discrete] [[geo-multipoint]] @@ -303,7 +310,7 @@ The following is an example of a list of GeoJSON points: POST /example/_doc { "location" : { - "type" : "multipoint", + "type" : "MultiPoint", "coordinates" : [ [102.0, 2.0], [103.0, 2.0] ] @@ -332,7 +339,7 @@ The following is an example of a list of GeoJSON linestrings: POST /example/_doc { "location" : { - "type" : "multilinestring", + "type" : "MultiLineString", "coordinates" : [ [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0] ], [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0] ], @@ -363,7 +370,7 @@ The following is an example of a list of GeoJSON polygons (second polygon contai POST /example/_doc { "location" : { - "type" : "multipolygon", + "type" : "MultiPolygon", "coordinates" : [ [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ], [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], @@ -394,14 +401,14 @@ The following is an example of a collection of GeoJSON geometry objects: POST /example/_doc { "location" : { - "type": "geometrycollection", + "type": "GeometryCollection", "geometries": [ { - "type": "point", + "type": "Point", "coordinates": [100.0, 0.0] }, { - "type": "linestring", + "type": "LineString", "coordinates": [ [101.0, 0.0], [102.0, 1.0] ] } ] diff --git a/docs/reference/migration/apis/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc index e3ebd57263c2d..fd82bb3e0e6d2 100644 --- a/docs/reference/migration/apis/deprecation.asciidoc +++ b/docs/reference/migration/apis/deprecation.asciidoc @@ -5,14 +5,11 @@ Deprecation info ++++ -IMPORTANT: Use this API to check for deprecated configuration before performing -a major version upgrade. You should run it on the last minor version of the -major version you are upgrading from, as earlier minor versions may not include -all deprecations. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] The deprecation API is to be used to retrieve information about different cluster, node, and index level settings that use deprecated features that will -be removed or changed in the next major version. +be removed or changed in a future version. [[migration-api-request]] ==== {api-request-title} @@ -118,7 +115,7 @@ issue. |======= |warning | You can upgrade directly, but you are using deprecated functionality -which will not be available or behave differently in the next major version. +which will not be available or behave differently in a future version. |critical | You cannot upgrade without fixing this problem. |======= diff --git a/docs/reference/migration/apis/feature_upgrade.asciidoc b/docs/reference/migration/apis/feature-migration.asciidoc similarity index 68% rename from docs/reference/migration/apis/feature_upgrade.asciidoc rename to docs/reference/migration/apis/feature-migration.asciidoc index 1f1fc5b2aa239..9cd904f42e084 100644 --- a/docs/reference/migration/apis/feature_upgrade.asciidoc +++ b/docs/reference/migration/apis/feature-migration.asciidoc @@ -1,35 +1,47 @@ [role="xpack"] -[[migration-api-feature-upgrade]] -=== Feature Upgrade APIs +[[feature-migration-api]] +=== Feature migration APIs ++++ -Feature upgrade APIs +Feature migration ++++ -IMPORTANT: Use this API to check for system features that need to be upgraded before -a major version upgrade. 
You should run it on the last minor version of the -major version you are upgrading from. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] -The feature upgrade APIs are to be used to retrieve information about system features -that have to be upgraded before a cluster can be migrated to the next major version number, -and to trigger an automated system upgrade that might potentially involve downtime for -{es} system features. +Version upgrades sometimes require changes to how features store configuration +information and data in system indices. The feature migration APIs enable you to +see what features require changes, initiate the automatic migration process, and +check migration status. -[[feature-upgrade-api-request]] +Some functionality might be temporarily unavailable during the migration +process. + +[[feature-migration-api-request]] ==== {api-request-title} `GET /migration/system_features` -[[feature-upgrade-api-prereqs]] +`POST /migration/system_features` + +[[feature-migration-api-prereqs]] ==== {api-prereq-title} * If the {es} {security-features} are enabled, you must have the `manage` <> to use this API. -[[feature-upgrade-api-example]] +[[feature-migration-api-desc]] +==== {api-description-title} + +Submit a GET request to the `_migration/system_features` endpoint to see what +features need to be migrated and the status of any migrations that are in +progress. + +Submit a POST request to the endpoint to start the migration process. + +[[feature-migration-api-example]] ==== {api-examples-title} -To see the list of system features needing upgrades, submit a GET request to the -`_migration/system_features` endpoint: +When you submit a GET request to the `_migration/system_features` endpoint, the +response indicates the status of any features that need to be migrated. [source,console] -------------------------------------------------- @@ -120,10 +132,10 @@ Example response: -------------------------------------------------- // TESTRESPONSE[s/"minimum_index_version" : "8.0.0"/"minimum_index_version" : $body.$_path/] -This response tells us that Elasticsearch security needs its internal -indices upgraded before we can upgrade the cluster to 8.0. -To perform the required upgrade, submit a POST request to the same endpoint. +When you submit a POST request to the `_migration/system_features` endpoint to +start the migration process, the response indicates what features will be +migrated. [source,console] -------------------------------------------------- @@ -138,13 +150,13 @@ Example response: "accepted" : true, "features" : [ { - "feature_name" : "security" + "feature_name" : "security" <1> } ] } -------------------------------------------------- // TESTRESPONSE[skip: can't actually upgrade system indices in these tests] -This tells us that the security index is being upgraded. To check the -overall status of the upgrade, call the endpoint with GET. +<1> {es} security will be migrated before the cluster is upgraded. +Subsequent GET requests will return the status of the migration process. diff --git a/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc new file mode 100644 index 0000000000000..6a606ac83354c --- /dev/null +++ b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc @@ -0,0 +1,4 @@ +TIP: These APIs are designed for indirect use by {kib}'s **Upgrade Assistant**. 
+We strongly recommend you use the **Upgrade Assistant** to upgrade from +{prev-major-last} to {version}. For upgrade instructions, refer to +{stack-ref}/upgrading-elastic-stack.html[Upgrading to Elastic {version}]. \ No newline at end of file diff --git a/docs/reference/migration/migrate_8_0.asciidoc b/docs/reference/migration/migrate_8_0.asciidoc index 14dc02b060d1d..9c7ba87a4759f 100644 --- a/docs/reference/migration/migrate_8_0.asciidoc +++ b/docs/reference/migration/migrate_8_0.asciidoc @@ -9,8 +9,6 @@ your application to {es} 8.0. See also <> and <>. -coming::[8.0.0] - [discrete] [[breaking-changes-8.0]] === Breaking changes diff --git a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc index 4955cfc189504..63a625c89026d 100644 --- a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc @@ -13,7 +13,7 @@ TIP: {ess-skip-section} ==== *Details* + In previous versions of {es}, in order to register a snapshot repository -backed by Amazon S3, Google Cloud Storge (GCS) or Microsoft Azure Blob +backed by Amazon S3, Google Cloud Storage (GCS) or Microsoft Azure Blob Storage, you first had to install the corresponding Elasticsearch plugin, for example `repository-s3`. These plugins are now included in {es} by default. diff --git a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc index dad6c988a6995..a3f7e256e7c4d 100644 --- a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc @@ -679,7 +679,7 @@ The {ml} {ref}/ml-post-data.html[post data to jobs API] is deprecated starting i and will be removed in a future major version. *Impact* + -Use {ref}/ml-apis.html#ml-api-datafeed-endpoint[{dfeeds}] instead. +Use {ref}/ml-ad-apis.html#ml-api-datafeed-endpoint[{dfeeds}] instead. ==== .The `job_id` property of the Update {dfeeds} API has been removed. diff --git a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc index 4833f587d1896..0e14d5b4ec0af 100644 --- a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc @@ -12,7 +12,7 @@ *Details* + To reduce the dependency of the JDBC driver onto Elasticsearch classes, the JDBC driver returns geometry data as strings using the WKT (well-known text) format instead of classes from the `org.elasticsearch.geometry`. -Users can choose the geometry library desired to convert the string represantion into a full-blown objects +Users can choose the geometry library desired to convert the string representation into full-blown objects either such as the `elasticsearch-geo` library (which returned the object `org.elasticsearch.geo` as before), jts or spatial4j. diff --git a/docs/reference/migration/migration.asciidoc b/docs/reference/migration/migration.asciidoc index 88c1631e30903..ffb2ca7a7859d 100644 --- a/docs/reference/migration/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -2,9 +2,12 @@ [[migration-api]] == Migration APIs -The migration APIs simplify upgrading {xpack} indices from one version to another. +The migration APIs power {kib}'s **Upgrade Assistant** feature.
+ +include::apis/shared-migration-apis-tip.asciidoc[] * <> +* <> include::apis/deprecation.asciidoc[] -include::apis/feature_upgrade.asciidoc[] +include::apis/feature-migration.asciidoc[] diff --git a/docs/reference/ml/anomaly-detection/apis/index.asciidoc b/docs/reference/ml/anomaly-detection/apis/index.asciidoc index 4603a7cd4aa04..3f8ab5a454b0a 100644 --- a/docs/reference/ml/anomaly-detection/apis/index.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/index.asciidoc @@ -1,4 +1,4 @@ -include::ml-apis.asciidoc[leveloffset=+1] +include::ml-ad-apis.asciidoc[leveloffset=+1] //ADD include::post-calendar-event.asciidoc[leveloffset=+2] include::put-calendar-job.asciidoc[leveloffset=+2] @@ -34,7 +34,6 @@ include::get-datafeed-stats.asciidoc[leveloffset=+2] include::get-influencer.asciidoc[leveloffset=+2] include::get-job.asciidoc[leveloffset=+2] include::get-job-stats.asciidoc[leveloffset=+2] -include::get-ml-info.asciidoc[leveloffset=+2] include::get-snapshot.asciidoc[leveloffset=+2] include::get-job-model-snapshot-upgrade-stats.asciidoc[leveloffset=+2] include::get-overall-buckets.asciidoc[leveloffset=+2] @@ -52,7 +51,6 @@ include::reset-job.asciidoc[leveloffset=+2] //REVERT include::revert-snapshot.asciidoc[leveloffset=+2] //SET/START/STOP -include::set-upgrade-mode.asciidoc[leveloffset=+2] include::start-datafeed.asciidoc[leveloffset=+2] include::stop-datafeed.asciidoc[leveloffset=+2] //UPDATE diff --git a/docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc b/docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc similarity index 91% rename from docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc rename to docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc index d44395b66046c..856232c933432 100644 --- a/docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc @@ -1,10 +1,10 @@ [role="xpack"] -[[ml-apis]] +[[ml-ad-apis]] = {ml-cap} {anomaly-detect} APIs You can use the following APIs to perform {ml} {anomaly-detect} activities. -See also <>. +See also <>, <>, <>. [discrete] [[ml-api-anomaly-job-endpoint]] @@ -70,20 +70,9 @@ See also <>. * <> * <> -[discrete] -[[ml-api-ml-info-endpoint]] -== Info - -* <> - [discrete] [[ml-api-delete-expired-data-endpoint]] == Delete expired data * <> -[discrete] -[[ml-set-upgrade-mode-endpoint]] -== Set upgrade mode - -* <> diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 3844d5fcd7aed..4bbf9d800e3da 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -330,7 +330,7 @@ formatting is based on the {kib} settings. The peak number of bytes of memory ever used by the model. ==== -==== _Data delay has occured_ +==== _Data delay has occurred_ `context.message`:: A preconstructed message for the rule. 
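The migration APIs referenced in the `migration.asciidoc` hunk above are meant to be queried before an upgrade. A minimal pre-upgrade check might look like the following sketch; the deprecation info endpoint path is taken from the {es} reference rather than from this changeset, so treat it as an assumption here.

[source,console]
----
GET /_migration/deprecations

GET /_migration/system_features
----

The first request lists deprecated settings and functionality in use on the cluster; the second reports which system features still need their internal indices migrated, as described in the feature migration API documentation above.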
diff --git a/docs/reference/ml/anomaly-detection/apis/get-ml-info.asciidoc b/docs/reference/ml/common/apis/get-ml-info.asciidoc similarity index 100% rename from docs/reference/ml/anomaly-detection/apis/get-ml-info.asciidoc rename to docs/reference/ml/common/apis/get-ml-info.asciidoc diff --git a/docs/reference/ml/common/apis/get-ml-memory.asciidoc b/docs/reference/ml/common/apis/get-ml-memory.asciidoc new file mode 100644 index 0000000000000..2dd79a969aca0 --- /dev/null +++ b/docs/reference/ml/common/apis/get-ml-memory.asciidoc @@ -0,0 +1,310 @@ +[role="xpack"] +[[get-ml-memory]] += Get machine learning memory stats API + +[subs="attributes"] +++++ +Get {ml} memory stats +++++ + +Returns information on how {ml} is using memory. + +[[get-ml-memory-request]] +== {api-request-title} + +`GET _ml/memory/_stats` + +`GET _ml/memory//_stats` + +[[get-ml-memory-prereqs]] +== {api-prereq-title} + +Requires the `monitor_ml` cluster privilege. This privilege is included in the +`machine_learning_user` built-in role. + +[[get-ml-memory-desc]] +== {api-description-title} + +Get information about how {ml} jobs and trained models are using memory, on each +node, both within the JVM heap, and natively, outside of the JVM. + +[[get-ml-memory-path-params]] +== {api-path-parms-title} + +``:: + (Optional, string) The names of particular nodes in the cluster to target. + For example, `nodeId1,nodeId2` or `ml:true`. For node selection options, + see <>. + +[[get-ml-memory-query-parms]] +== {api-query-parms-title} + +`human`:: + Specify this query parameter to include the fields with units in the response. + Otherwise only the `_in_bytes` sizes are returned in the response. + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] + +[role="child_attributes"] +[[get-ml-memory-response-body]] +== {api-response-body-title} + +`_nodes`:: +(object) +Contains statistics about the number of nodes selected by the request. ++ +.Properties of `_nodes` +[%collapsible%open] +==== +`failed`:: +(integer) +Number of nodes that rejected the request or failed to respond. If this value +is not `0`, a reason for the rejection or failure is included in the response. + +`successful`:: +(integer) +Number of nodes that responded successfully to the request. + +`total`:: +(integer) +Total number of nodes selected by the request. +==== + +`cluster_name`:: +(string) +Name of the cluster. Based on the <> setting. + +`nodes`:: +(object) +Contains statistics for the nodes selected by the request. ++ +.Properties of `nodes` +[%collapsible%open] +==== +``:: +(object) +Contains statistics for the node. ++ +.Properties of `` +[%collapsible%open] +===== +`attributes`:: +(object) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] + +`ephemeral_id`:: +(string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] + +`jvm`:: +(object) +Contains Java Virtual Machine (JVM) statistics for the node. ++ +.Properties of `jvm` +[%collapsible%open] +====== +`heap_max`:: +(<>) +Maximum amount of memory available for use by the heap. + +`heap_max_in_bytes`:: +(integer) +Maximum amount of memory, in bytes, available for use by the heap. + +`java_inference`:: +(<>) +Amount of Java heap currently being used for caching inference models. + +`java_inference_in_bytes`:: +(integer) +Amount of Java heap, in bytes, currently being used for caching inference models. + +`java_inference_max`:: +(<>) +Maximum amount of Java heap to be used for caching inference models. 
+ +`java_inference_max_in_bytes`:: +(integer) +Maximum amount of Java heap, in bytes, to be used for caching inference models. +====== + +`mem`:: +(object) +Contains statistics about memory usage for the node. ++ +.Properties of `mem` +[%collapsible%open] +====== +`adjusted_total`:: +(<>) +If the amount of physical memory has been overridden using the `es.total_memory_bytes` +system property then this reports the overridden value. Otherwise it reports the same +value as `total`. + +`adjusted_total_in_bytes`:: +(integer) +If the amount of physical memory has been overridden using the `es.total_memory_bytes` +system property then this reports the overridden value in bytes. Otherwise it reports +the same value as `total_in_bytes`. + +`ml`:: +(object) +Contains statistics about {ml} use of native memory on the node. ++ +.Properties of `ml` +[%collapsible%open] +======= +`anomaly_detectors`:: +(<>) +Amount of native memory set aside for {anomaly-jobs}. + +`anomaly_detectors_in_bytes`:: +(integer) +Amount of native memory, in bytes, set aside for {anomaly-jobs}. + +`data_frame_analytics`:: +(<>) +Amount of native memory set aside for {dfanalytics-jobs}. + +`data_frame_analytics_in_bytes`:: +(integer) +Amount of native memory, in bytes, set aside for {dfanalytics-jobs}. + +`max`:: +(<>) +Maximum amount of native memory (separate to the JVM heap) that may be used by {ml} +native processes. + +`max_in_bytes`:: +(integer) +Maximum amount of native memory (separate to the JVM heap), in bytes, that may be +used by {ml} native processes. + +`native_code_overhead`:: +(<>) +Amount of native memory set aside for loading {ml} native code shared libraries. + +`native_code_overhead_in_bytes`:: +(integer) +Amount of native memory, in bytes, set aside for loading {ml} native code shared libraries. + +`native_inference`:: +(<>) +Amount of native memory set aside for trained models that have a PyTorch `model_type`. + +`native_inference_in_bytes`:: +(integer) +Amount of native memory, in bytes, set aside for trained models that have a PyTorch `model_type`. +======= + +`total`:: +(<>) +Total amount of physical memory. + +`total_in_bytes`:: +(integer) +Total amount of physical memory in bytes. + +====== + +`name`:: +(string) +Human-readable identifier for the node. Based on the <> setting. + +`roles`:: +(array of strings) +Roles assigned to the node. See <>. 
+ +`transport_address`:: +(string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] + +===== +==== + +[[get-ml-memory-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +GET _ml/memory/_stats?human +-------------------------------------------------- +// TEST[setup:node] + +This is a possible response: + +[source,console-result] +---- +{ + "_nodes": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "cluster_name": "my_cluster", + "nodes": { + "pQHNt5rXTTWNvUgOrdynKg": { + "name": "node-0", + "ephemeral_id": "ITZ6WGZnSqqeT_unfit2SQ", + "transport_address": "127.0.0.1:9300", + "attributes": { + "ml.machine_memory": "68719476736", + "ml.max_jvm_size": "536870912" + }, + "roles": [ + "data", + "data_cold", + "data_content", + "data_frozen", + "data_hot", + "data_warm", + "ingest", + "master", + "ml", + "remote_cluster_client", + "transform" + ], + "mem": { + "total": "64gb", + "total_in_bytes": 68719476736, + "adjusted_total": "64gb", + "adjusted_total_in_bytes": 68719476736, + "ml": { + "max": "19.1gb", + "max_in_bytes": 20615843020, + "native_code_overhead": "0b", + "native_code_overhead_in_bytes": 0, + "anomaly_detectors": "0b", + "anomaly_detectors_in_bytes": 0, + "data_frame_analytics": "0b", + "data_frame_analytics_in_bytes": 0, + "native_inference": "0b", + "native_inference_in_bytes": 0 + } + }, + "jvm": { + "heap_max": "512mb", + "heap_max_in_bytes": 536870912, + "java_inference_max": "204.7mb", + "java_inference_max_in_bytes": 214748364, + "java_inference": "0b", + "java_inference_in_bytes": 0 + } + } + } +} +---- +// TESTRESPONSE[s/"cluster_name": "my_cluster"/"cluster_name": $body.cluster_name/] +// TESTRESPONSE[s/"pQHNt5rXTTWNvUgOrdynKg"/\$node_name/] +// TESTRESPONSE[s/"ephemeral_id": "ITZ6WGZnSqqeT_unfit2SQ"/"ephemeral_id": "$body.$_path"/] +// TESTRESPONSE[s/"transport_address": "127.0.0.1:9300"/"transport_address": "$body.$_path"/] +// TESTRESPONSE[s/"attributes": \{[^\}]*\}/"attributes": $body.$_path/] +// TESTRESPONSE[s/"total": "64gb"/"total": "$body.$_path"/] +// TESTRESPONSE[s/"total_in_bytes": 68719476736/"total_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"adjusted_total": "64gb"/"adjusted_total": "$body.$_path"/] +// TESTRESPONSE[s/"adjusted_total_in_bytes": 68719476736/"adjusted_total_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"max": "19.1gb"/"max": "$body.$_path"/] +// TESTRESPONSE[s/"max_in_bytes": 20615843020/"max_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"heap_max": "512mb"/"heap_max": "$body.$_path"/] +// TESTRESPONSE[s/"heap_max_in_bytes": 536870912/"heap_max_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"java_inference_max": "204.7mb"/"java_inference_max": "$body.$_path"/] +// TESTRESPONSE[s/"java_inference_max_in_bytes": 214748364/"java_inference_max_in_bytes": $body.$_path/] diff --git a/docs/reference/ml/common/apis/index.asciidoc b/docs/reference/ml/common/apis/index.asciidoc new file mode 100644 index 0000000000000..84ebd0adffb7f --- /dev/null +++ b/docs/reference/ml/common/apis/index.asciidoc @@ -0,0 +1,7 @@ +include::ml-apis.asciidoc[leveloffset=+1] +//GET +include::get-ml-info.asciidoc[leveloffset=+2] +include::get-ml-memory.asciidoc[leveloffset=+2] +//SET +include::set-upgrade-mode.asciidoc[leveloffset=+2] + diff --git a/docs/reference/ml/common/apis/ml-apis.asciidoc b/docs/reference/ml/common/apis/ml-apis.asciidoc new file mode 100644 index 0000000000000..c4349f3eb7366 --- /dev/null +++ b/docs/reference/ml/common/apis/ml-apis.asciidoc @@ -0,0 +1,15 @@ 
+[role="xpack"] +[[ml-apis]] += {ml-cap} APIs + +You can use the following APIs to retrieve information related to the +{stack-ml-features}: + +* <> +* <> + +The following API is useful when you upgrade: + +* <> + +See also <>, <>, and <>. diff --git a/docs/reference/ml/anomaly-detection/apis/set-upgrade-mode.asciidoc b/docs/reference/ml/common/apis/set-upgrade-mode.asciidoc similarity index 100% rename from docs/reference/ml/anomaly-detection/apis/set-upgrade-mode.asciidoc rename to docs/reference/ml/common/apis/set-upgrade-mode.asciidoc diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index d5b0a2fcdaded..6fc55539c439c 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -995,7 +995,7 @@ Tokenize with special tokens. The tokens typically included in MPNet-style token end::inference-config-nlp-tokenization-mpnet-with-special-tokens[] tag::inference-config-nlp-vocabulary[] -The configuration for retreiving the vocabulary of the model. The vocabulary is +The configuration for retrieving the vocabulary of the model. The vocabulary is then used at inference time. This information is usually provided automatically by storing vocabulary in a known, internally managed index. end::inference-config-nlp-vocabulary[] diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 9ea0d2220d349..1363e179f6e1f 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -33,7 +33,6 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the The create trained model API enables you to supply a trained model that is not created by {dfanalytics}. - [[ml-put-trained-models-path-params]] == {api-path-parms-title} @@ -893,7 +892,7 @@ Appropriate types are: * `tree_ensemble`: The model definition is an ensemble model of decision trees. * `lang_ident`: A special type reserved for language identification models. * `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only -NLP models are supported. +NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}]. -- `tags`:: diff --git a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc index c3a17da0c5322..b2310a35b1f06 100644 --- a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc @@ -21,10 +21,10 @@ Stops a trained model deployment. Requires the `manage_ml` cluster privilege. This privilege is included in the `machine_learning_admin` built-in role. -//// [[stop-trained-model-deployment-desc]] == {api-description-title} -//// + +Deployment is required only for trained models that have a PyTorch `model_type`. [[stop-trained-model-deployment-path-params]] == {api-path-parms-title} diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc index be1149d54a0b7..a313f05dbf5f4 100644 --- a/docs/reference/modules/discovery/bootstrapping.asciidoc +++ b/docs/reference/modules/discovery/bootstrapping.asciidoc @@ -75,7 +75,7 @@ configuration. 
If each node name is a fully-qualified domain name such as `master-a.example.com` then you must use fully-qualified domain names in the `cluster.initial_master_nodes` list too; conversely if your node names are bare hostnames (without the `.example.com` suffix) then you must use bare hostnames -in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualifed +in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualified and bare hostnames, or there is some other mismatch between `node.name` and `cluster.initial_master_nodes`, then the cluster will not form successfully and you will see log messages like the following. diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 684b0b571f149..24b951a46ed9d 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -9,8 +9,13 @@ stored in an index. The `percolate` query itself contains the document that will be used as a query to match with the stored queries. -[discrete] -=== Sample Usage +==== Sample usage + +TIP: To provide a simple example, this documentation uses one index, +`my-index-000001`, for both the percolate queries and documents. This setup can +work well when there are just a few percolate queries registered. For heavier +usage, we recommend you store queries and documents in separate indices. For +more details, refer to <>. Create an index with two fields: @@ -118,11 +123,6 @@ The above request will yield the following response: <2> The `_percolator_document_slot` field indicates which document has matched with this query. Useful when percolating multiple documents simultaneously. -TIP: To provide a simple example, this documentation uses one index `my-index-000001` for both the percolate queries and documents. -This set-up can work well when there are just a few percolate queries registered. However, with heavier usage it is recommended -to store queries and documents in separate indices. Please see <> for more details. - -[discrete] ==== Parameters The following parameters are required when percolating a document: @@ -148,7 +148,6 @@ In that case the `document` parameter can be substituted with the following para `preference`:: Optionally, preference to be used to fetch document to percolate. `version`:: Optionally, the expected version of the document to be fetched. -[discrete] ==== Percolating in a filter context In case you are not interested in the score, better performance can be expected by wrapping @@ -183,7 +182,6 @@ should be wrapped in a `constant_score` query or a `bool` query's filter clause. Note that the `percolate` query never gets cached by the query cache. -[discrete] ==== Percolating multiple documents The `percolate` query can match multiple documents simultaneously with the indexed percolator queries. @@ -265,14 +263,12 @@ GET /my-index-000001/_search <1> The `_percolator_document_slot` indicates that the first, second and last documents specified in the `percolate` query are matching with this query. -[discrete] ==== Percolating an Existing Document In order to percolate a newly indexed document, the `percolate` query can be used. Based on the response from an index request, the `_id` and other meta information can be used to immediately percolate the newly added document. -[discrete] ===== Example Based on the previous example. @@ -330,14 +326,12 @@ case the search request would fail with a version conflict error.
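For reference, here is a minimal sketch of such a request, reusing the `my-index-000001` index and the `query` field from the examples above (the `id` and `version` values are illustrative):

[source,console]
----
GET /my-index-000001/_search
{
  "query": {
    "percolate": {
      "field": "query",
      "index": "my-index-000001",
      "id": "2",
      "version": 1
    }
  }
}
----

Here `index` and `id` identify the already-indexed document to fetch and percolate, and the optional `version` makes the request fail with a version conflict error if the fetched document is not at the expected version.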
The search response returned is identical to the one in the previous example. -[discrete] ==== Percolate query and highlighting The `percolate` query is handled in a special way when it comes to highlighting. The query's hits are used to highlight the document that is provided in the `percolate` query, whereas with regular highlighting the query in the search request is used to highlight the hits. -[discrete] ===== Example This example is based on the mapping of the first example. @@ -555,7 +549,6 @@ The slightly different response: <1> The highlight fields have been prefixed with the document slot they belong to, in order to know which highlight field belongs to what document. -[discrete] ==== Specifying multiple percolate queries It is possible to specify multiple `percolate` queries in a single search request: @@ -641,7 +634,6 @@ The above search request returns a response similar to this: <1> The `_percolator_document_slot_query1` percolator slot field indicates that these matched slots are from the `percolate` query with `_name` parameter set to `query1`. -[discrete] [[how-it-works]] ==== How it Works Under the Hood @@ -689,6 +681,7 @@ a different index configuration, like the number of primary shards. [[percolate-query-notes]] ==== Notes + ===== Allow expensive queries Percolate queries will not be executed if <> is set to false. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 7badd5ce5dd45..c8c31ee3dd775 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -3,6 +3,11 @@ The following pages have moved or been deleted. +[role="exclude",id="migration-api-feature-upgrade"] +=== Feature upgrade APIs + +Refer to <>. + [role="exclude",id="java-clients"] === Java transport client and security diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index fab77ab7e58b9..91afec0038c36 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,8 @@ This section summarizes the changes in each release. +* <<release-notes-8.1.0>> +* <<release-notes-8.0.0>> * <> * <> * <> @@ -14,6 +16,8 @@ This section summarizes the changes in each release. -- +include::release-notes/8.1.asciidoc[] +include::release-notes/8.0.0.asciidoc[] include::release-notes/8.0.0-rc2.asciidoc[] include::release-notes/8.0.0-rc1.asciidoc[] include::release-notes/8.0.0-beta1.asciidoc[] diff --git a/docs/reference/release-notes/8.0.0-rc2.asciidoc b/docs/reference/release-notes/8.0.0-rc2.asciidoc index 2c9e5c299ce47..9d3f93e006847 100644 --- a/docs/reference/release-notes/8.0.0-rc2.asciidoc +++ b/docs/reference/release-notes/8.0.0-rc2.asciidoc @@ -15,6 +15,27 @@ data loss. If you upgrade from a released version, such as 7.16, to a pre-release version for testing, discard the contents of the cluster when you are done. Do not attempt to upgrade to the final 8.0 release. +* If you installed {es} from an archive on an aarch64 platform like Linux ARM or macOS M1, the +`elastic` user password and {kib} enrollment token are not generated +automatically when starting your node for the first time.
++ +-- +After the node starts, generate the `elastic` password with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-reset-password -u elastic +---- + +Then, create an enrollment token for {kib} with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-create-enrollment-token -s kibana +---- +-- [[deprecation-8.0.0-rc2]] [float] === Deprecations @@ -58,6 +79,9 @@ Machine Learning:: Network:: * Improve slow inbound handling to include response type {es-pull}80425[#80425] +Packaging:: +* Convert repository plugins to modules {es-pull}81870[#81870] (issue: {es-issue}81652[#81652]) + Search:: * Check nested fields earlier in kNN search {es-pull}80516[#80516] (issue: {es-issue}78473[#78473]) diff --git a/docs/reference/release-notes/8.0.0.asciidoc b/docs/reference/release-notes/8.0.0.asciidoc new file mode 100644 index 0000000000000..2e333974a38e5 --- /dev/null +++ b/docs/reference/release-notes/8.0.0.asciidoc @@ -0,0 +1,711 @@ +[[release-notes-8.0.0]] +== {es} version 8.0.0 + +The following list contains the changes in 8.0.0 as compared to 7.17.0, and combines +release notes from the 8.0.0-alpha1, -alpha2, -beta1, -rc1 and -rc2 releases. + +Also see <>. + +[[known-issues-8.0.0]] +[float] +=== Known issues + +* If you installed {es} from an archive on an aarch64 platform like Linux ARM or macOS M1, the +`elastic` user password and {kib} enrollment token are not generated +automatically when starting your node for the first time. ++ +-- +After the node starts, generate the `elastic` password with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-reset-password -u elastic +---- + +Then, create an enrollment token for {kib} with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-create-enrollment-token -s kibana +---- +-- + + +[[breaking-8.0.0]] +[float] +=== Breaking changes + +Aggregations:: +* Percentiles aggregation: disallow specifying same percentile values twice {es-pull}52257[#52257] (issue: {es-issue}51871[#51871]) +* Remove adjacency matrix setting {es-pull}46327[#46327] (issues: {es-issue}46257[#46257], {es-issue}46324[#46324]) +* Remove `MovingAverage` pipeline aggregation {es-pull}39328[#39328] +* Remove deprecated `_time` and `_term` sort orders {es-pull}39450[#39450] +* Remove deprecated date histo interval {es-pull}75000[#75000] + +Allocation:: +* Require single data nodes to respect disk watermarks {es-pull}73737[#73737] (issues: {es-issue}55805[#55805], {es-issue}73733[#73733]) +* Remove `include_relocations` setting {es-pull}47717[#47717] (issues: {es-issue}46079[#46079], {es-issue}47443[#47443]) + +Analysis:: +* Cleanup versioned deprecations in analysis {es-pull}41560[#41560] (issue: {es-issue}41164[#41164]) +* Remove preconfigured `delimited_payload_filter` {es-pull}43686[#43686] (issues: {es-issue}41560[#41560], {es-issue}43684[#43684]) + +Authentication:: +* Always add file and native realms unless explicitly disabled {es-pull}69096[#69096] (issue: {es-issue}50892[#50892]) +* Do not set a NameID format in Policy by default {es-pull}44090[#44090] (issue: {es-issue}40353[#40353]) +* Make order setting mandatory for Realm config {es-pull}51195[#51195] (issue: {es-issue}37614[#37614]) + +CCR:: +* Avoid auto following leader system indices in CCR {es-pull}72815[#72815] (issue: {es-issue}67686[#67686]) + +Cluster Coordination:: +* Remove join timeout {es-pull}60873[#60873] (issue: {es-issue}60872[#60872]) +* Remove node filters for voting config exclusions {es-pull}55673[#55673] (issues: {es-issue}47990[#47990], {es-issue}50836[#50836]) +* Remove support for delaying
state recovery pending master {es-pull}53845[#53845] (issue: {es-issue}51806[#51806]) + +Distributed:: +* Remove synced flush {es-pull}50882[#50882] (issues: {es-issue}50776[#50776], {es-issue}50835[#50835]) +* Remove the `cluster.remote.connect` setting {es-pull}54175[#54175] (issue: {es-issue}53924[#53924]) + +Engine:: +* Force merge should reject requests with `only_expunge_deletes` and `max_num_segments` set {es-pull}44761[#44761] (issue: {es-issue}43102[#43102]) +* Remove per-type indexing stats {es-pull}47203[#47203] (issue: {es-issue}41059[#41059]) +* Remove translog retention settings {es-pull}51697[#51697] (issue: {es-issue}50775[#50775]) + +Features/CAT APIs:: +* Remove the deprecated `local` parameter for `_cat/indices` {es-pull}64868[#64868] (issue: {es-issue}62198[#62198]) +* Remove the deprecated `local` parameter for `_cat/shards` {es-pull}64867[#64867] (issue: {es-issue}62197[#62197]) + +Features/Features:: +* Remove deprecated `._tier` allocation filtering settings {es-pull}73074[#73074] (issue: {es-issue}72835[#72835]) + +Features/ILM+SLM:: +* Add lower bound on `poll_interval` {es-pull}39593[#39593] (issue: {es-issue}39163[#39163]) +* Make the ILM `freeze` action a no-op {es-pull}77158[#77158] (issue: {es-issue}70192[#70192]) +* Always enforce default tier preference {es-pull}79751[#79751] (issue: {es-issue}76147[#76147]) +* Validate that snapshot repository exists for ILM policies at creation/update time {es-pull}78468[#78468] (issues: {es-issue}72957[#72957], {es-issue}77657[#77657]) +* Default `cluster.routing.allocation.enforce_default_tier_preference` to `true` {es-pull}79275[#79275] (issues: {es-issue}76147[#76147], {es-issue}79210[#79210]) + +Features/Indices APIs:: +* Change `prefer_v2_templates` parameter to default to true {es-pull}55489[#55489] (issues: {es-issue}53101[#53101], {es-issue}55411[#55411]) +* Remove deprecated `_upgrade` API {es-pull}64732[#64732] (issue: {es-issue}21337[#21337]) +* Remove local parameter for get field mapping request {es-pull}55100[#55100] (issue: {es-issue}55099[#55099]) +* Remove `include_type_name` parameter from REST layer {es-pull}48632[#48632] (issue: {es-issue}41059[#41059]) +* Remove the `template` field in index templates {es-pull}49460[#49460] (issue: {es-issue}21009[#21009]) +* Remove endpoint for freezing indices {es-pull}78918[#78918] (issues: {es-issue}70192[#70192], {es-issue}77273[#77273]) + +Features/Watcher:: +* Move watcher history to data stream {es-pull}64252[#64252] + +Geo:: +* Disallow creating `geo_shape` mappings with deprecated parameters {es-pull}70850[#70850] (issue: {es-issue}32039[#32039]) +* Remove bounding box query `type` parameter {es-pull}74536[#74536] + +Infra/Circuit Breakers:: +* Fixed synchronizing inflight breaker with internal variable {es-pull}40878[#40878] + +Infra/Core:: +* Limit processors by available processors {es-pull}44894[#44894] (issue: {es-issue}44889[#44889]) +* Remove `nodes/0` folder prefix from data path {es-pull}42489[#42489] +* Remove `bootstrap.system_call_filter` setting {es-pull}72848[#72848] +* Remove `fixed_auto_queue_size` threadpool type {es-pull}52280[#52280] +* Remove `node.max_local_storage_nodes` {es-pull}42428[#42428] (issue: {es-issue}42426[#42426]) +* Remove camel case named date/time formats {es-pull}60044[#60044] +* Remove legacy role settings {es-pull}71163[#71163] (issues: {es-issue}54998[#54998], {es-issue}66409[#66409], {es-issue}71143[#71143]) +* Remove `processors` setting {es-pull}45905[#45905] (issue: {es-issue}45855[#45855]) +* Remove the `local` 
parameter of `/_cat/nodes` {es-pull}50594[#50594] (issues: {es-issue}50088[#50088], {es-issue}50499[#50499]) +* Remove the listener thread pool {es-pull}53314[#53314] (issue: {es-issue}53049[#53049]) +* Remove the node local storage setting {es-pull}54381[#54381] (issue: {es-issue}54374[#54374]) +* Remove the `pidfile` setting {es-pull}45940[#45940] (issue: {es-issue}45938[#45938]) +* Removes `week_year` date format {es-pull}63384[#63384] (issue: {es-issue}60707[#60707]) +* System indices treated as restricted indices {es-pull}74212[#74212] (issue: {es-issue}69298[#69298]) +* Remove Joda dependency {es-pull}79007[#79007] +* Remove Joda support from date formatters {es-pull}78990[#78990] +* All system indices are hidden indices {es-pull}79512[#79512] + +Infra/Logging:: +* Remove slowlog level {es-pull}57591[#57591] (issue: {es-issue}56171[#56171]) + +Infra/Plugins:: +* Remove deprecated basic license feature enablement settings {es-pull}56211[#56211] (issue: {es-issue}54745[#54745]) + +Infra/REST API:: +* Remove content type required setting {es-pull}61043[#61043] +* Remove deprecated endpoints containing `_xpack` {es-pull}48170[#48170] (issue: {es-issue}35958[#35958]) +* Remove deprecated endpoints of hot threads API {es-pull}55109[#55109] (issue: {es-issue}52640[#52640]) +* Allow parsing Content-Type and Accept headers with version {es-pull}61427[#61427] + +Infra/Resiliency:: +* Fail node containing ancient closed index {es-pull}44264[#44264] (issues: {es-issue}21830[#21830], {es-issue}41731[#41731], {es-issue}44230[#44230]) + +Infra/Scripting:: +* Consolidate script parsing from object {es-pull}59507[#59507] (issue: {es-issue}59391[#59391]) +* Move `script_cache` into _nodes/stats {es-pull}59265[#59265] (issues: {es-issue}50152[#50152], {es-issue}59262[#59262]) +* Remove general cache settings {es-pull}59262[#59262] (issue: {es-issue}50152[#50152]) + +Infra/Settings:: +* Change default value of `action.destructive_requires_name` to `true` {es-pull}66908[#66908] (issue: {es-issue}61074[#61074]) +* Forbid settings without a namespace {es-pull}45947[#45947] (issues: {es-issue}45905[#45905], {es-issue}45940[#45940]) + +Ingest:: +* Remove default maxmind GeoIP databases from distribution {es-pull}78362[#78362] (issue: {es-issue}68920[#68920]) + +License:: +* Set `xpack.security.enabled` to true for all licenses {es-pull}72300[#72300] +* Enforce license expiration {es-pull}79671[#79671] + +Machine Learning:: +* Remove deprecated `_xpack` endpoints {es-pull}59870[#59870] (issues: {es-issue}35958[#35958], {es-issue}48170[#48170]) +* Remove the ability to update datafeed's `job_id` {es-pull}44752[#44752] (issue: {es-issue}44616[#44616]) +* Remove `allow_no_datafeeds` and `allow_no_jobs` parameters from APIs {es-pull}80048[#80048] (issue: {es-issue}60732[#60732]) + +Mapping:: +* Remove `boost` mapping parameter {es-pull}62639[#62639] (issue: {es-issue}62623[#62623]) +* Remove support for chained multi-fields {es-pull}42333[#42333] (issues: {es-issue}41267[#41267], {es-issue}41926[#41926]) +* Remove support for string in `unmapped_type` {es-pull}45675[#45675] +* Removes typed URLs from mapping APIs {es-pull}41676[#41676] + +Network:: +* Remove client feature tracking {es-pull}44929[#44929] (issues: {es-issue}31020[#31020], {es-issue}42538[#42538], {es-issue}44667[#44667]) +* Remove escape hatch permitting incompatible builds {es-pull}65753[#65753] (issues: {es-issue}65249[#65249], {es-issue}65601[#65601]) + +Packaging:: +* Remove SysV init support {es-pull}51716[#51716] (issue: 
{es-issue}51480[#51480]) +* Remove support for `JAVA_HOME` {es-pull}69149[#69149] (issue: {es-issue}55820[#55820]) +* Remove no-jdk distributions {es-pull}76896[#76896] (issue: {es-issue}65109[#65109]) +* Require Java 17 for running Elasticsearch {es-pull}79873[#79873] + +Recovery:: +* Remove dangling index auto import functionality {es-pull}59698[#59698] (issue: {es-issue}48366[#48366]) + +Reindex:: +* Reindex from Remote encoding {es-pull}41007[#41007] (issue: {es-issue}40303[#40303]) +* Reindex remove outer level size {es-pull}43373[#43373] (issues: {es-issue}24344[#24344], {es-issue}41894[#41894]) + +Rollup:: +* `RollupStart` endpoint should return OK if job already started {es-pull}41502[#41502] (issues: {es-issue}35928[#35928], {es-issue}39845[#39845]) + +Search:: +* Decouple shard allocation awareness from search and get requests {es-pull}45735[#45735] (issue: {es-issue}43453[#43453]) +* Fix range query on date fields for number inputs {es-pull}63692[#63692] (issue: {es-issue}63680[#63680]) +* Make fuzziness reject illegal values earlier {es-pull}33511[#33511] +* Make remote cluster resolution stricter {es-pull}40419[#40419] (issue: {es-issue}37863[#37863]) +* Parse empty first line in msearch request body as action metadata {es-pull}41011[#41011] (issue: {es-issue}39841[#39841]) +* Remove `CommonTermsQuery` and `cutoff_frequency` param {es-pull}42654[#42654] (issue: {es-issue}37096[#37096]) +* Remove `type` query {es-pull}47207[#47207] (issue: {es-issue}41059[#41059]) +* Remove `use_field_mapping` format option for docvalue fields {es-pull}55622[#55622] +* Remove deprecated `SimpleQueryStringBuilder` parameters {es-pull}57200[#57200] +* Remove deprecated `search.remote` settings {es-pull}42381[#42381] (issues: {es-issue}33413[#33413], {es-issue}38556[#38556]) +* Remove deprecated sort options: `nested_path` and `nested_filter` {es-pull}42809[#42809] (issue: {es-issue}27098[#27098]) +* Remove deprecated vector functions {es-pull}48725[#48725] (issue: {es-issue}48604[#48604]) +* Remove support for `_type` in searches {es-pull}68564[#68564] (issues: {es-issue}41059[#41059], {es-issue}68311[#68311]) +* Remove support for sparse vectors {es-pull}48781[#48781] (issue: {es-issue}48368[#48368]) +* Remove the object format for `indices_boost` {es-pull}55078[#55078] +* Removes type from `TermVectors` APIs {es-pull}42198[#42198] (issue: {es-issue}41059[#41059]) +* Removes typed endpoint from search and related APIs {es-pull}41640[#41640] +* Set max allowed size for stored async response {es-pull}74455[#74455] (issue: {es-issue}67594[#67594]) +* `indices.query.bool.max_clause_count` now limits all query clauses {es-pull}75297[#75297] + +Security:: +* Remove obsolete security settings {es-pull}40496[#40496] +* Remove support of creating CA on the fly when generating certificates {es-pull}65590[#65590] (issue: {es-issue}61884[#61884]) +* Remove the `id` field from the `InvalidateApiKey` API {es-pull}66671[#66671] (issue: {es-issue}66317[#66317]) +* Remove the migrate tool {es-pull}42174[#42174] +* Compress audit logs {es-pull}64472[#64472] (issue: {es-issue}63843[#63843]) +* Remove insecure settings {es-pull}46147[#46147] (issue: {es-issue}45947[#45947]) +* Remove `kibana_dashboard_only_user` reserved role {es-pull}76507[#76507] + +Snapshot/Restore:: +* Blob store compress default to `true` {es-pull}40033[#40033] +* Get snapshots support for multiple repositories {es-pull}42090[#42090] (issue: {es-issue}41210[#41210]) +* Remove repository stats API {es-pull}62309[#62309] (issue: 
{es-issue}62297[#62297]) +* Remove frozen cache setting leniency {es-pull}71013[#71013] (issue: {es-issue}70341[#70341]) +* Adjust snapshot index resolution behavior to be more intuitive {es-pull}79670[#79670] (issue: {es-issue}78320[#78320]) + +TLS:: +* Reject misconfigured/ambiguous SSL server config {es-pull}45892[#45892] +* Remove support for configurable PKCS#11 keystores {es-pull}75404[#75404] +* Remove the client transport profile filter {es-pull}43236[#43236] + + + +[[breaking-java-8.0.0]] +[float] +=== Breaking Java changes + +Authentication:: +* Mandate x-pack REST handler installed {es-pull}71061[#71061] (issue: {es-issue}70523[#70523]) + +CCR:: +* Remove the `CcrClient` {es-pull}42816[#42816] + +CRUD:: +* Remove types from `BulkRequest` {es-pull}46983[#46983] (issue: {es-issue}41059[#41059]) +* Remove `Client.prepareIndex(index, type, id)` method {es-pull}48443[#48443] + + +Client:: +* Remove `SecurityClient` from x-pack {es-pull}42471[#42471] + +Features/ILM+SLM:: +* Remove the `ILMClient` {es-pull}42817[#42817] + +Features/Monitoring:: +* Remove `MonitoringClient` from x-pack {es-pull}42770[#42770] + +Features/Watcher:: +* Remove `WatcherClient` from x-pack {es-pull}42815[#42815] + +Infra/Core:: +* Remove `XPackClient` from x-pack {es-pull}42729[#42729] +* Remove the transport client {es-pull}42538[#42538] +* Remove transport client from x-pack {es-pull}42202[#42202] + +Infra/REST API:: +* Copy HTTP headers to `ThreadContext` strictly {es-pull}45945[#45945] + +Machine Learning:: +* Remove the `MachineLearningClient` {es-pull}43108[#43108] + +Mapping:: +* Remove type filter from `GetMappings` API {es-pull}47364[#47364] (issue: {es-issue}41059[#41059]) +* Remove `type` parameter from `PutMappingRequest.buildFromSimplifiedDef()` {es-pull}50844[#50844] (issue: {es-issue}41059[#41059]) +* Remove unused parameter from `MetadataFieldMapper.TypeParser#getDefault()` {es-pull}51219[#51219] +* Remove `type` parameter from `CIR.mapping(type, object...)` {es-pull}50739[#50739] (issue: {es-issue}41059[#41059]) + +Search:: +* Removes types from `SearchRequest` and `QueryShardContext` {es-pull}42112[#42112] + +Snapshot/Restore:: +* Remove deprecated repository methods {es-pull}42359[#42359] (issue: {es-issue}42213[#42213]) + + +[[deprecation-8.0.0]] +[float] +=== Deprecations + +Authentication:: +* Deprecate setup-passwords tool {es-pull}76902[#76902] + +CRUD:: +* Remove `indices_segments` 'verbose' parameter {es-pull}78451[#78451] (issue: {es-issue}75955[#75955]) + +Engine:: +* Deprecate setting `max_merge_at_once_explicit` {es-pull}80574[#80574] + +Machine Learning:: +* Deprecate `estimated_heap_memory_usage_bytes` and replace with `model_size_bytes` {es-pull}80554[#80554] + +Monitoring:: +* Add deprecation info API entries for deprecated monitoring settings {es-pull}78799[#78799] +* Automatically install monitoring templates at plugin initialization {es-pull}78350[#78350] +* Remove Monitoring ingest pipelines {es-pull}77459[#77459] (issue: {es-issue}50770[#50770]) + +Search:: +* Configure `IndexSearcher.maxClauseCount()` based on node characteristics {es-pull}81525[#81525] (issue: {es-issue}46433[#46433]) + +Transform:: +* Improve transform deprecation messages {es-pull}81847[#81847] (issues: {es-issue}81521[#81521], {es-issue}81523[#81523]) + +[[feature-8.0.0]] +[float] +=== New features + +Security:: +* Auto-configure TLS for new nodes of new clusters {es-pull}77231[#77231] (issues: {es-issue}75144[#75144], {es-issue}75704[#75704]) + +Snapshot/Restore:: +* Support IAM roles for 
Kubernetes service accounts {es-pull}81255[#81255] (issue: {es-issue}52625[#52625]) + +Watcher:: +* Use `startsWith` rather than exact matches for Watcher history template names {es-pull}82396[#82396] + + +[[enhancement-8.0.0]] +[float] +=== Enhancements + +Analysis:: +* Move `reload_analyzers` endpoint to x-pack {es-pull}43559[#43559] + +Authentication:: +* Reset elastic password CLI tool {es-pull}74892[#74892] (issues: {es-issue}70113[#70113], {es-issue}74890[#74890]) +* Autogenerate and print elastic password on startup {es-pull}77291[#77291] +* Enroll Kibana API uses Service Accounts {es-pull}76370[#76370] +* Add `reset-kibana-system-user` tool {es-pull}77322[#77322] +* New CLI tool to reset password for built-in users {es-pull}79709[#79709] +* Auto-configure the `elastic` user password {es-pull}78306[#78306] + +Authorization:: +* Granting `kibana_system` reserved role access to "all" privileges to `.internal.preview.alerts*` index {es-pull}80889[#80889] (issues: {es-issue}76624[#76624], {es-issue}80746[#80746], {es-issue}116374[#116374]) +* Granting `kibana_system` reserved role access to "all" privileges to .preview.alerts* index {es-pull}80746[#80746] +* Granting editor and viewer roles access to alerts-as-data indices {es-pull}81285[#81285] + +Cluster Coordination:: +* Prevent downgrades from 8.x to 7.x {es-pull}78586[#78586] (issues: {es-issue}42489[#42489], {es-issue}52414[#52414]) +* Prevent downgrades from 8.x to 7.x {es-pull}78638[#78638] (issues: {es-issue}42489[#42489], {es-issue}52414[#52414]) +* Make `TaskBatcher` less lock-heavy {es-pull}82227[#82227] (issue: {es-issue}77466[#77466]) + +Data streams:: +* Data stream support read and write with custom routing and partition size {es-pull}74394[#74394] (issue: {es-issue}74390[#74390]) + +EQL:: +* Add option for returning results from the tail of the stream {es-pull}64869[#64869] (issue: {es-issue}58646[#58646]) +* Introduce case insensitive variant `in~` {es-pull}68176[#68176] (issue: {es-issue}68172[#68172]) +* Optimize redundant `toString` {es-pull}71070[#71070] (issue: {es-issue}70681[#70681]) + +Engine:: +* Always use soft-deletes in `InternalEngine` {es-pull}50415[#50415] +* Remove translog retention policy {es-pull}51417[#51417] (issue: {es-issue}50775[#50775]) + +Features/CAT APIs:: +* Remove `size` and add `time` params to `_cat/threadpool` {es-pull}55736[#55736] (issue: {es-issue}54478[#54478]) + +Features/ILM+SLM:: +* Allow for setting the total shards per node in the Allocate ILM action {es-pull}76794[#76794] (issue: {es-issue}76775[#76775]) +* Inject migrate action regardless of allocate action {es-pull}79090[#79090] (issue: {es-issue}76147[#76147]) +* Make unchanged ILM policy updates into noop {es-pull}82240[#82240] (issue: {es-issue}82065[#82065]) +* Avoid unnecessary `LifecycleExecutionState` recalculation {es-pull}81558[#81558] (issues: {es-issue}77466[#77466], {es-issue}79692[#79692]) + +Features/Indices APIs:: +* Batch rollover cluster state updates {es-pull}79945[#79945] (issues: {es-issue}77466[#77466], {es-issue}79782[#79782]) +* Reuse `MappingMetadata` instances in Metadata class {es-pull}80348[#80348] (issues: {es-issue}69772[#69772], {es-issue}77466[#77466]) + +Features/Stats:: +* Add bulk stats track the bulk per shard {es-pull}52208[#52208] (issues: {es-issue}47345[#47345], {es-issue}50536[#50536]) + +Features/Watcher:: +* Remove Watcher history clean up from monitoring {es-pull}67154[#67154] + +Infra/Core:: +* Remove aliases exist action {es-pull}43430[#43430] +* Remove indices exists action 
{es-pull}43164[#43164] +* Remove types exists action {es-pull}43344[#43344] +* Retain reference to stdout for exceptional cases {es-pull}77460[#77460] +* Check whether stdout is a real console {es-pull}79882[#79882] +* Share int, long, float, double, and byte pages {es-pull}75053[#75053] +* Revert "Deprecate resolution loss on date field (#78921)" {es-pull}79914[#79914] (issue: {es-issue}78921[#78921]) +* Add two missing entries to the deprecation information API {es-pull}80290[#80290] (issue: {es-issue}80233[#80233]) +* Prevent upgrades to 8.0 without first upgrading to the last 7.x release {es-pull}82321[#82321] (issue: {es-issue}81865[#81865]) + +Infra/Logging:: +* Make Elasticsearch JSON logs ECS compliant {es-pull}47105[#47105] (issue: {es-issue}46119[#46119]) + +Infra/REST API:: +* Allow for field declaration for future compatible versions {es-pull}69774[#69774] (issue: {es-issue}51816[#51816]) +* Introduce stability description to the REST API specification {es-pull}38413[#38413] +* Parsing: Validate that fields are not registered twice {es-pull}70243[#70243] +* Support response content-type with versioned media type {es-pull}65500[#65500] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoints for index and get APIs {es-pull}69131[#69131] (issue: {es-issue}54160[#54160]) +* [REST API Compatibility] Typed endpoints for put and get mapping and get field mappings {es-pull}71721[#71721] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Allow `copy_settings` flag for resize operations {es-pull}75184[#75184] (issues: {es-issue}38514[#38514], {es-issue}51816[#51816]) +* [REST API Compatibility] Allow for type in geo shape query {es-pull}74553[#74553] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Always return `adjust_pure_negative` value {es-pull}75182[#75182] (issues: {es-issue}49543[#49543], {es-issue}51816[#51816]) +* [REST API Compatibility] Clean up x-pack/plugin rest compat tests {es-pull}74701[#74701] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Do not return `_doc` for empty mappings in template {es-pull}75448[#75448] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160], {es-issue}70966[#70966], {es-issue}74544[#74544]) +* [REST API Compatibility] Dummy REST action for `indices.upgrade` API {es-pull}75136[#75136] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] REST Terms vector typed response {es-pull}73117[#73117] +* [REST API Compatibility] Rename `BulkItemResponse.Failure` type field {es-pull}74937[#74937] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Type metadata for docs used in simulate request {es-pull}74222[#74222] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed `TermLookups` {es-pull}74544[#74544] (issues: {es-issue}46943[#46943], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed and x-pack graph explore API {es-pull}74185[#74185] (issues: {es-issue}46935[#46935], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed endpoint for bulk API {es-pull}73571[#73571] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoint for multi-get API {es-pull}73878[#73878] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoints for `RestUpdateAction` and `RestDeleteAction` {es-pull}73115[#73115] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed 
endpoints for `get_source` API {es-pull}73957[#73957] (issues: {es-issue}46587[#46587], {es-issue}46931[#46931], {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoints for explain API {es-pull}73901[#73901] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoints for search `_count` API {es-pull}73958[#73958] (issues: {es-issue}42112[#42112], {es-issue}51816[#51816]) +* [REST API Compatibility] Typed indexing stats {es-pull}74181[#74181] (issues: {es-issue}47203[#47203], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Types for percolate query API {es-pull}74698[#74698] (issues: {es-issue}46985[#46985], {es-issue}51816[#51816], {es-issue}54160[#54160], {es-issue}74689[#74689]) +* [REST API Compatibility] Validate query typed API {es-pull}74171[#74171] (issues: {es-issue}46927[#46927], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Voting config exclusion exception message {es-pull}75406[#75406] (issues: {es-issue}51816[#51816], {es-issue}55291[#55291]) +* [REST API Compatibility] `MoreLikeThisQuery` with types {es-pull}75123[#75123] (issues: {es-issue}42198[#42198], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Update and delete by query using size field {es-pull}69606[#69606] +* [REST API Compatibility] Indices boost in object format {es-pull}74422[#74422] (issues: {es-issue}51816[#51816], {es-issue}55078[#55078]) +* [REST API Compatibility] Typed endpoints for search and related endpoints {es-pull}72155[#72155] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Allow to use size `-1` {es-pull}75342[#75342] (issues: {es-issue}51816[#51816], {es-issue}69548[#69548], {es-issue}70209[#70209]) +* [REST API Compatibility] Ignore `use_field_mapping` option for docvalue {es-pull}74435[#74435] (issue: {es-issue}55622[#55622]) +* [REST API Compatibility] `_time` and `_term` sort orders {es-pull}74919[#74919] (issues: {es-issue}39450[#39450], {es-issue}51816[#51816]) +* [REST API Compatibility] `template` parameter and field on PUT index template {es-pull}71238[#71238] (issues: {es-issue}49460[#49460], {es-issue}51816[#51816], {es-issue}68905[#68905]) +* [REST API Compatibility] Make query registration easier {es-pull}75722[#75722] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed query {es-pull}75453[#75453] (issues: {es-issue}47207[#47207], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Deprecate the use of synced flush {es-pull}75372[#75372] (issues: {es-issue}50882[#50882], {es-issue}51816[#51816]) +* [REST API Compatibility] Licence `accept_enterprise` and response changes {es-pull}75479[#75479] (issues: {es-issue}50067[#50067], {es-issue}50735[#50735], {es-issue}51816[#51816], {es-issue}58217[#58217]) + +Infra/Scripting:: +* Update `DeprecationMap` to `DynamicMap` {es-pull}56149[#56149] (issue: {es-issue}52103[#52103]) +* Add nio Buffers to Painless {es-pull}79870[#79870] (issue: {es-issue}79867[#79867]) +* Restore the scripting general cache {es-pull}79453[#79453] (issue: {es-issue}62899[#62899]) + +Infra/Settings:: +* Fixed inconsistent `Setting.exist()` {es-pull}46603[#46603] (issue: {es-issue}41830[#41830]) +* Remove `index.optimize_auto_generated_id` setting (#27583) {es-pull}27600[#27600] (issue: {es-issue}27583[#27583]) +* Implement setting deduplication via string interning {es-pull}80493[#80493] (issues: {es-issue}77466[#77466], {es-issue}78892[#78892]) + +Ingest:: +* Add
support for `_meta` field to ingest pipelines {es-pull}76381[#76381] +* Remove binary field after attachment processor execution {es-pull}79172[#79172] +* Improving cache lookup to reduce recomputing / searches {es-pull}77259[#77259] +* Extract more standard metadata from binary files {es-pull}78754[#78754] (issue: {es-issue}22339[#22339]) + +License:: +* Add deprecated `accept_enterprise` param to `/_xpack` {es-pull}58220[#58220] (issue: {es-issue}58217[#58217]) +* Support `accept_enterprise` param in get license API {es-pull}50067[#50067] (issue: {es-issue}49474[#49474]) +* Enforce Transport TLS check on all licenses {es-pull}79602[#79602] (issue: {es-issue}75292[#75292]) + +Machine Learning:: +* The Windows build platform for the {ml} C++ code now uses Visual Studio 2019 {ml-pull}1352[#1352] +* The macOS build platform for the {ml} C++ code is now Mojave running Xcode 11.3.1, + or Ubuntu 20.04 running clang 8 for cross compilation {ml-pull}1429[#1429] +* Add a new application for evaluating PyTorch models. The app depends on LibTorch - the C++ front end to PyTorch - and performs inference on models stored in the TorchScript format {ml-pull}1902[#1902] +* Adding new PUT trained model vocabulary endpoint {es-pull}77387[#77387] +* Creating new PUT model definition part API {es-pull}76987[#76987] +* Add inference time configuration overrides {es-pull}78441[#78441] (issue: {es-issue}77799[#77799]) +* Optimize source extraction for `categorize_text` aggregation {es-pull}79099[#79099] +* The Linux build platform for the {ml} C++ code is now CentOS 7 running gcc 10.3. {ml-pull}2028[#2028] +* Make ML indices hidden when the node becomes master {es-pull}77416[#77416] (issue: {es-issue}53674[#53674]) +* Add `deployment_stats` to trained model stats {es-pull}80531[#80531] +* The setting `use_auto_machine_memory_percent` now defaults `max_model_memory_limit` {es-pull}80532[#80532] (issue: {es-issue}80415[#80415]) + +Mapping:: +* Sparse vector to throw exception consistently {es-pull}62646[#62646] +* Add support for configuring HNSW parameters {es-pull}79193[#79193] (issue: {es-issue}78473[#78473]) +* Extend `dense_vector` to support indexing vectors {es-pull}78491[#78491] (issue: {es-issue}78473[#78473]) + +Monitoring:: +* Add previously removed Monitoring settings back for 8.0 {es-pull}78784[#78784] +* Change Monitoring plugin cluster alerts to not install by default {es-pull}79657[#79657] +* Adding default templates for Metricbeat ECS data {es-pull}81744[#81744] + +Network:: +* Enable LZ4 transport compression by default {es-pull}76326[#76326] (issue: {es-issue}73497[#73497]) +* Improve slow inbound handling to include response type {es-pull}80425[#80425] + +Packaging:: +* Make the Docker build more re-usable in Cloud {es-pull}50277[#50277] (issues: {es-issue}46166[#46166], {es-issue}49926[#49926]) +* Update docker-compose.yml to fix bootstrap check error {es-pull}47650[#47650] +* Allow total memory to be overridden {es-pull}78750[#78750] (issue: {es-issue}65905[#65905]) +* Convert repository plugins to modules {es-pull}81870[#81870] (issue: {es-issue}81652[#81652]) + +Recovery:: +* Use Lucene index in peer recovery and resync {es-pull}51189[#51189] (issue: {es-issue}50775[#50775]) +* Fix `PendingReplicationActions` submitting lots of `NOOP` tasks to `GENERIC` {es-pull}82092[#82092] (issues: {es-issue}77466[#77466], {es-issue}79837[#79837]) + +Reindex:: +* Make reindexing managed by a persistent task {es-pull}43382[#43382] (issue: {es-issue}42612[#42612]) +* Reindex restart from checkpoint 
{es-pull}46055[#46055] (issue: {es-issue}42612[#42612]) +* Reindex search resiliency {es-pull}45497[#45497] (issues: {es-issue}42612[#42612], {es-issue}43187[#43187]) +* Reindex v2 rethrottle sliced fix {es-pull}46967[#46967] (issues: {es-issue}42612[#42612], {es-issue}46763[#46763]) +* Do not scroll if max docs is less than scroll size (update/delete by query) {es-pull}81654[#81654] (issue: {es-issue}54270[#54270]) + +Rollup:: +* Adds support for `date_nanos` in Rollup Metric and `DateHistogram` Configs {es-pull}59349[#59349] (issue: {es-issue}44505[#44505]) + +SQL:: +* Add text formatting support for multivalue {es-pull}68606[#68606] +* Add xDBC and CLI support. QA CSV specs {es-pull}68966[#68966] +* Export array values through result sets {es-pull}69512[#69512] +* Improve alias resolution in sub-queries {es-pull}67216[#67216] (issue: {es-issue}56713[#56713]) +* Improve the optimization of null conditionals {es-pull}71192[#71192] +* Push `WHERE` clause inside subqueries {es-pull}71362[#71362] +* Use Java `String` methods for `LTRIM/RTRIM` {es-pull}57594[#57594] +* QL: Make canonical form take into account children {es-pull}71266[#71266] +* QL: Polish optimizer expression rule declaration {es-pull}71396[#71396] +* QL: Propagate nullability constraints across conjunctions {es-pull}71187[#71187] (issue: {es-issue}70683[#70683]) + +Search:: +* Completely disallow setting negative size in search {es-pull}70209[#70209] (issue: {es-issue}69548[#69548]) +* Make `0` as invalid value for `min_children` in `has_child` query {es-pull}41347[#41347] +* Return error when remote indices are locally resolved {es-pull}74556[#74556] (issue: {es-issue}26247[#26247]) +* [REST API Compatibility] Nested path and filter sort options {es-pull}76022[#76022] (issues: {es-issue}42809[#42809], {es-issue}51816[#51816]) +* [REST API Compatibility] `CommonTermsQuery` and `cutoff_frequency` parameter {es-pull}75896[#75896] (issues: {es-issue}42654[#42654], {es-issue}51816[#51816]) +* [REST API Compatibility] Allow first empty line for `_msearch` {es-pull}75886[#75886] (issues: {es-issue}41011[#41011], {es-issue}51816[#51816]) +* Node level can match action {es-pull}78765[#78765] +* TSDB: Add time series information to field caps {es-pull}78790[#78790] (issue: {es-issue}74660[#74660]) +* Add new kNN search endpoint {es-pull}79013[#79013] (issue: {es-issue}78473[#78473]) +* Disallow kNN searches on nested vector fields {es-pull}79403[#79403] (issue: {es-issue}78473[#78473]) +* Ensure kNN search respects authorization {es-pull}79693[#79693] (issue: {es-issue}78473[#78473]) +* Load kNN vectors format with mmapfs {es-pull}78724[#78724] (issue: {es-issue}78473[#78473]) +* Support cosine similarity in kNN search {es-pull}79500[#79500] +* Check nested fields earlier in kNN search {es-pull}80516[#80516] (issue: {es-issue}78473[#78473]) + +Security:: +* Add a tool for creating enrollment tokens {es-pull}74890[#74890] +* Add the Enroll Kibana API {es-pull}72207[#72207] +* Change default hashing algorithm for FIPS 140 {es-pull}55544[#55544] +* Create enrollment token {es-pull}73573[#73573] (issues: {es-issue}71438[#71438], {es-issue}72129[#72129]) +* Enroll node API {es-pull}72129[#72129] +* Configure security for the initial node CLI {es-pull}74868[#74868] +* Generate and store password hash for elastic user {es-pull}76276[#76276] (issue: {es-issue}75310[#75310]) +* Set elastic password and generate enrollment token {es-pull}75816[#75816] (issue: {es-issue}75310[#75310]) +* Add
`elasticsearch-enroll-node` tool {es-pull}77292[#77292] +* Default hasher to `PBKDF2_STRETCH` on FIPS mode {es-pull}76274[#76274] +* Add v7 `restCompat` for invalidating API key with the id field {es-pull}78664[#78664] (issue: {es-issue}66671[#66671]) +* Print enrollment token on startup {es-pull}78293[#78293] +* Startup check for security implicit behavior change {es-pull}76879[#76879] +* CLI tool to reconfigure nodes to enroll {es-pull}79690[#79690] (issue: {es-issue}7718[#7718]) +* Security auto-configuration for packaged installations {es-pull}75144[#75144] (issue: {es-issue}78306[#78306]) + +Snapshot/Restore:: +* Introduce searchable snapshots index setting for cascade deletion of snapshots {es-pull}74977[#74977] +* Unify blob store compress setting {es-pull}39346[#39346] (issue: {es-issue}39073[#39073]) +* Add recovery state tracking for searchable snapshots {es-pull}60505[#60505] +* Allow listing older repositories {es-pull}78244[#78244] +* Optimize SLM Policy Queries {es-pull}79341[#79341] (issue: {es-issue}79321[#79321]) + +TLS:: +* Add `ChaCha20` TLS ciphers on Java 12+ {es-pull}42155[#42155] +* Add support for `KeyStore` filters to `ssl-config` {es-pull}75407[#75407] +* Update TLS ciphers and protocols for JDK 11 {es-pull}41808[#41808] (issues: {es-issue}38646[#38646], {es-issue}41385[#41385]) + +Transform:: +* Prevent old beta transforms from starting {es-pull}79712[#79712] + +TSDB:: +* Automatically add timestamp mapper {es-pull}79136[#79136] +* Create a coordinating node level reader for tsdb {es-pull}79197[#79197] +* Fix TSDB shrink test in multi-version cluster {es-pull}79940[#79940] (issue: {es-issue}79936[#79936]) +* Do not allow shadowing metrics or dimensions {es-pull}79757[#79757] + + +[[bug-8.0.0]] +[float] +=== Bug fixes + +Aggregations:: +* Fix BWC issues for `x_pack/usage` {es-pull}55181[#55181] (issue: {es-issue}54847[#54847]) +* Fix `DoubleBounds` null serialization {es-pull}59475[#59475] +* Fix `TopHitsAggregationBuilder` adding duplicate `_score` sort clauses {es-pull}42179[#42179] (issue: {es-issue}42154[#42154]) +* Fix `t_test` usage stats {es-pull}54753[#54753] (issue: {es-issue}54744[#54744]) +* Throw exception if legacy interval cannot be parsed in `DateIntervalWrapper` {es-pull}41972[#41972] (issue: {es-issue}41970[#41970]) + +Autoscaling:: +* Autoscaling use adjusted total memory {es-pull}80528[#80528] (issue: {es-issue}78750[#78750]) + +CCR:: +* Fix `AutoFollow` version checks {es-pull}73776[#73776] (issue: {es-issue}72935[#72935]) + +Cluster Coordination:: +* Apply cluster states in system context {es-pull}53785[#53785] (issue: {es-issue}53751[#53751]) + +Data streams:: +* Prohibit restoring a data stream alias with a conflicting write data stream {es-pull}81217[#81217] (issue: {es-issue}80976[#80976]) + +Distributed:: +* Introduce `?wait_for_active_shards=index-setting` {es-pull}67158[#67158] (issue: {es-issue}66419[#66419]) +* Fixes to task result index mapping {es-pull}50359[#50359] (issue: {es-issue}50248[#50248]) + +Features/CAT APIs:: +* Fix cat recovery display of bytes fields {es-pull}40379[#40379] (issue: {es-issue}40335[#40335]) + +Features/ILM+SLM:: +* Ensuring that the `ShrinkAction` does not hang if total shards per node is too low {es-pull}76732[#76732] (issue: {es-issue}44070[#44070]) +* Less verbose serialization of snapshot failure in SLM metadata {es-pull}80942[#80942] (issue: {es-issue}77466[#77466]) + +Features/Indices APIs:: +* Fix `ComposableIndexTemplate` equals when `composed_of` is null {es-pull}80864[#80864] + +Features/Java 
High Level REST Client:: +* The Java High Level Rest Client (HLRC) has been removed and replaced by a new +{es} Java client. For migration steps, refer to +{java-api-client}/migrate-hlrc.html[Migrate from the High Level Rest Client]. + +Geo:: +* Preprocess polygon rings before processing it for decomposition {es-pull}59501[#59501] (issues: {es-issue}54441[#54441], {es-issue}59386[#59386]) + +Infra/Core:: +* Add searchable snapshot cache folder to `NodeEnvironment` {es-pull}66297[#66297] (issue: {es-issue}65725[#65725]) +* CLI tools: Write errors to stderr instead of stdout {es-pull}45586[#45586] (issue: {es-issue}43260[#43260]) +* Precompute `ParsedMediaType` for XContentType {es-pull}67409[#67409] +* Prevent stack overflow in rounding {es-pull}80450[#80450] + +Infra/Logging:: +* Fix NPE when logging null values in JSON {es-pull}53715[#53715] (issue: {es-issue}46702[#46702]) +* Fix stats in slow logs to be an escaped JSON {es-pull}44642[#44642] +* Populate data stream fields when `xOpaqueId` not provided {es-pull}62156[#62156] + +Infra/REST API:: +* Do not allow spaces within `MediaType's` parameters {es-pull}64650[#64650] (issue: {es-issue}51816[#51816]) +* Handle incorrect header values {es-pull}64708[#64708] (issues: {es-issue}51816[#51816], {es-issue}64689[#64689]) +* Ignore media ranges when parsing {es-pull}64721[#64721] (issues: {es-issue}51816[#51816], {es-issue}64689[#64689]) +* `RestController` should not consume request content {es-pull}44902[#44902] (issue: {es-issue}37504[#37504]) +* Handle exceptions thrown from `RestCompatibleVersionHelper` {es-pull}80253[#80253] (issues: {es-issue}78214[#78214], {es-issue}79060[#79060]) + +Infra/Scripting:: +* Change compound assignment structure to support string concatenation {es-pull}61825[#61825] +* Fixes casting in constant folding {es-pull}61508[#61508] +* Several minor Painless fixes {es-pull}61594[#61594] +* Fix duplicated allow lists upon script engine creation {es-pull}82820[#82820] (issue: {es-issue}82778[#82778]) + +Infra/Settings:: +* Stricter `UpdateSettingsRequest` parsing on the REST layer {es-pull}79227[#79227] (issue: {es-issue}29268[#29268]) +* Set Auto expand replica on deprecation log data stream {es-pull}79226[#79226] (issue: {es-issue}78991[#78991]) + +Ingest:: +* Adjust default geoip logging to be less verbose {es-pull}81404[#81404] (issue: {es-issue}81356[#81356]) + +Machine Learning:: +* Add timeout parameter for delete trained models API {es-pull}79739[#79739] (issue: {es-issue}77070[#77070]) +* Tone down ML unassigned job notifications {es-pull}79578[#79578] (issue: {es-issue}79270[#79270]) +* Use a new annotations index for future annotations {es-pull}79006[#79006] (issue: {es-issue}78439[#78439]) +* Set model state compatibility version to 8.0.0 {ml-pull}2139[#2139] +* Check that `total_definition_length` is consistent before starting a deployment {es-pull}80553[#80553] +* Fail inference processor more consistently on certain error types {es-pull}81475[#81475] +* Optimize the job stats call to do fewer searches {es-pull}82362[#82362] (issue: {es-issue}82255[#82255]) + +Mapping:: +* Remove assertions that mappings have one top-level key {es-pull}58779[#58779] (issue: {es-issue}58521[#58521]) + +Packaging:: +* Suppress illegal access in plugin install {es-pull}41620[#41620] (issue: {es-issue}41478[#41478]) + +Recovery:: +* Make shard started response handling only return after the cluster state update completes {es-pull}82790[#82790] (issue: {es-issue}81628[#81628]) + +SQL:: +* Introduce dedicated node for
`HAVING` declaration {es-pull}71279[#71279] (issue: {es-issue}69758[#69758]) +* Make `RestSqlQueryAction` thread-safe {es-pull}69901[#69901] + +Search:: +* Check for negative `from` values in search request body {es-pull}54953[#54953] (issue: {es-issue}54897[#54897]) +* Fix `VectorsFeatureSetUsage` serialization in BWC mode {es-pull}55399[#55399] (issue: {es-issue}55378[#55378]) +* Handle total hits equal to `track_total_hits` {es-pull}37907[#37907] (issue: {es-issue}37897[#37897]) +* Improve error msg for CCS request on node without remote cluster role {es-pull}60351[#60351] (issue: {es-issue}59683[#59683]) +* Remove unsafe assertion in wildcard field {es-pull}78966[#78966] + +Security:: +* Allow access to restricted system indices for reserved system roles {es-pull}76845[#76845] + +Snapshot/Restore:: +* Fix `GET /_snapshot/_all/_all` if there are no repos {es-pull}43558[#43558] (issue: {es-issue}43547[#43547]) +* Don't fill stack traces in `SnapshotShardFailure` {es-pull}80009[#80009] (issue: {es-issue}79718[#79718]) +* Remove custom metadata if there is nothing to restore {es-pull}81373[#81373] (issues: {es-issue}81247[#81247], {es-issue}82019[#82019]) + +[[regression-8.0.0]] +[float] +=== Regressions + +Search:: +* Disable numeric sort optimization conditionally {es-pull}78103[#78103] + +[[upgrade-8.0.0]] +[float] +=== Upgrades + +Authentication:: +* Upgrade to UnboundID LDAP SDK v6.0.2 {es-pull}79332[#79332] + +Infra/Logging:: +* Upgrade ECS logging layout to latest version {es-pull}80500[#80500] + +Search:: +* Upgrade to Lucene 9 {es-pull}81426[#81426] + +Security:: +* Update to OpenSAML 4 {es-pull}77012[#77012] (issue: {es-issue}71983[#71983]) + +Snapshot/Restore:: +* Upgrade repository-hdfs plugin to Hadoop 3 {es-pull}76897[#76897] diff --git a/docs/reference/release-notes/8.1.asciidoc b/docs/reference/release-notes/8.1.asciidoc new file mode 100644 index 0000000000000..adb61d997228d --- /dev/null +++ b/docs/reference/release-notes/8.1.asciidoc @@ -0,0 +1,284 @@ +[[release-notes-8.1.0]] +== {es} version 8.1.0 + +coming[8.1.0] + +Also see <>. 
+ +[[breaking-8.1.0]] +[float] +=== Breaking changes + +Geo:: +* Fields API should return normalized geometries {es-pull}80649[#80649] (issues: {es-issue}79232[#79232], {es-issue}63739[#63739]) + +[[bug-8.1.0]] +[float] +=== Bug fixes + +Aggregations:: +* Fix: reduce float and half-float values to their stored precision {es-pull}83213[#83213] +* Reenable `BooleanTermsIT` {es-pull}83421[#83421] (issue: {es-issue}83351[#83351]) + +Allocation:: +* Fix `updateMinNode` condition {es-pull}80403[#80403] (issue: {es-issue}41194[#41194]) +* Make `*.routing.allocation.*` list-based setting {es-pull}80420[#80420] (issue: {es-issue}77773[#77773]) +* Permit metadata updates on flood-stage-blocked indices {es-pull}81781[#81781] +* Reroute after cluster recovery {es-pull}82856[#82856] (issue: {es-issue}82456[#82456]) + +Authorization:: +* Capture anonymous roles when creating API keys {es-pull}81427[#81427] (issue: {es-issue}81024[#81024]) +* Extend fleet-server service account privileges {es-pull}82600[#82600] + +Autoscaling:: +* Fix autoscaling of follower data streams {es-pull}83302[#83302] (issue: {es-issue}82857[#82857]) + +Geo:: +* Handle bounds properly when grid tiles crosses the dateline {es-pull}83348[#83348] (issue: {es-issue}83299[#83299]) + +Infra/Core:: +* Copy `trace.id` in threadcontext stash {es-pull}83218[#83218] + +Infra/Scripting:: +* Fix duplicated allow lists upon script engine creation {es-pull}82820[#82820] (issue: {es-issue}82778[#82778]) +* Fix plumbing in double and keyword runtime fields for the scripting fields API {es-pull}83392[#83392] + +Machine Learning:: +* Correctly capture min stats for `inference.ingest_processors` in ML usage {es-pull}82352[#82352] +* Fail queued inference requests with cause if the process crashes {es-pull}81584[#81584] +* Fix NLP tokenization `never_split` handling around punctuation {es-pull}82982[#82982] +* Fix `ZeroShotClassificationConfig` update mixing fields {es-pull}82848[#82848] +* Fixes `categorize_text` parameter validation to be parse order independent {es-pull}82628[#82628] (issue: {es-issue}82629[#82629]) +* Return `zxx` for `lang_ident_model_1` if no valid text is found for language identification {es-pull}82746[#82746] (issue: {es-issue}81933[#81933]) +* Validate vocabulary on model deployment {es-pull}81548[#81548] (issue: {es-issue}81470[#81470]) + +Mapping:: +* Add support for sub-fields to `search_as_you_type` fields {es-pull}82430[#82430] (issue: {es-issue}56326[#56326]) +* Better exception message for `MappingParser.parse` {es-pull}80696[#80696] + +Network:: +* Throw `NoSeedNodeLeftException` on proxy failure {es-pull}80961[#80961] (issue: {es-issue}80898[#80898]) + +Recovery:: +* Add missing `indices.recovery.internal_action_retry_timeout` to list of settings {es-pull}83354[#83354] +* Add missing max overcommit factor to list of (dynamic) settings {es-pull}83350[#83350] + +SQL:: +* Fix txt format for empty result sets {es-pull}83376[#83376] + +Search:: +* Returns valid PIT when no index matched {es-pull}83424[#83424] + +Security:: +* Add validation for API key role descriptors {es-pull}82049[#82049] (issue: {es-issue}67311[#67311]) + +Snapshot/Restore:: +* Adjust `LinuxFileSystemNatives.allocatedSizeInBytes` for aarch64 architectures {es-pull}81376[#81376] (issues: {es-issue}80437[#80437], {es-issue}81362[#81362]) +* Distinguish "missing repository" from "missing repository plugin" {es-pull}82457[#82457] (issue: {es-issue}81758[#81758]) +* Move get snapshots serialization to management pool {es-pull}83215[#83215] + +TSDB:: +*
Fix time series timestamp meta missing {es-pull}80695[#80695] + +Transform:: +* Fix NPE in transform version check {es-pull}81756[#81756] +* Fix condition on which the transform stops processing buckets {es-pull}82852[#82852] +* Prevent stopping of transforms due to threadpool limitation {es-pull}81912[#81912] (issue: {es-issue}81796[#81796]) + +[[deprecation-8.1.0]] +[float] +=== Deprecations + +CRUD:: +* Bulk actions JSON must be well-formed {es-pull}78876[#78876] (issue: {es-issue}43774[#43774]) + +Cluster Coordination:: +* Remove last few mentions of Zen discovery {es-pull}80410[#80410] + +[[enhancement-8.1.0]] +[float] +=== Enhancements + +Aggregations:: +* Add an aggregator for IPv4 and IPv6 subnets {es-pull}82410[#82410] +* Fail shards early when we can detect a type mismatch {es-pull}79869[#79869] (issue: {es-issue}72276[#72276]) +* Optimize `significant_text` aggregation to only parse the field it requires from `_source` {es-pull}79651[#79651] + +Allocation:: +* Identify other node in `SameShardAllocDec` message {es-pull}82890[#82890] (issue: {es-issue}80767[#80767]) +* Make `AllocationService#adaptAutoExpandReplicas` Faster {es-pull}83092[#83092] +* Speed up same host check {es-pull}80767[#80767] + +Analysis:: +* Expose Japanese completion filter to kuromoji analysis plugin {es-pull}81858[#81858] + +Authentication:: +* Enable `run_as` for all authentication schemes {es-pull}79809[#79809] +* Return API key name in `_authentication` response {es-pull}78946[#78946] (issue: {es-issue}70306[#70306]) + +Authorization:: +* Avoid loading authorized indices when requested indices are all concrete names {es-pull}81237[#81237] +* Optimize DLS bitset building for `matchAll` query {es-pull}81030[#81030] (issue: {es-issue}80904[#80904]) + +Cluster Coordination:: +* Add detail to slow cluster state warning message {es-pull}83221[#83221] +* Batch Index Settings Update Requests {es-pull}82896[#82896] (issue: {es-issue}79866[#79866]) +* Improve node-join task descriptions {es-pull}80090[#80090] +* Make `PeerFinder` log messages happier {es-pull}83222[#83222] +* More compact serialization of metadata {es-pull}82608[#82608] (issue: {es-issue}77466[#77466]) +* Paginate persisted cluster state {es-pull}78875[#78875] +* Reduce verbosity-increase timeout to 3 minutes {es-pull}81118[#81118] +* Use network recycler for publications {es-pull}80650[#80650] (issue: {es-issue}80111[#80111]) + +Data streams:: +* Defer reroute when autocreating datastream {es-pull}82412[#82412] (issue: {es-issue}82159[#82159]) + +ILM+SLM:: +* Expose the index age in ILM explain output {es-pull}81273[#81273] (issue: {es-issue}64429[#64429]) + +Indices APIs:: +* Batch auto create index cluster state updates {es-pull}82159[#82159] +* Expose 'features' option in Get Index API {es-pull}83083[#83083] (issue: {es-issue}82948[#82948]) +* Expose index health and status to the `_stats` API {es-pull}81954[#81954] (issue: {es-issue}80413[#80413]) +* Force merge REST API support `wait_for_completion` {es-pull}80463[#80463] (issue: {es-issue}80129[#80129]) + +Infra/Circuit Breakers:: +* Allow dynamically changing the `use_real_memory` setting {es-pull}78288[#78288] (issue: {es-issue}77324[#77324]) + +Infra/Core:: +* Use `VarHandles` for number conversions {es-pull}80367[#80367] (issue: {es-issue}78823[#78823]) +* Use `VarHandles` in `ByteUtils` {es-pull}80442[#80442] (issue: {es-issue}78823[#78823]) +* `FilterPathBasedFilter` support match fieldname with dot {es-pull}83178[#83178] (issues: {es-issue}83148[#83148],
{es-issue}83152[#83152]) + +Infra/REST API:: +* Allow for customised content-type validation {es-pull}80906[#80906] (issue: {es-issue}80482[#80482]) + +Infra/Scripting:: +* Add '$' syntax as a shortcut for 'field' in Painless {es-pull}80518[#80518] +* Add `BinaryDocValuesField` to replace `BytesRef` `(ScriptDocValues)` {es-pull}79760[#79760] +* Add a geo point field for the scripting fields api {es-pull}81395[#81395] +* Add date fields to the scripting fields api {es-pull}81272[#81272] +* Add half float mapping to the scripting fields API {es-pull}82294[#82294] +* Add scaled float to the scripting fields API {es-pull}82275[#82275] +* Add support for `GeoShape` to the scripting fields API {es-pull}81617[#81617] +* Fields API for IP mapped type {es-pull}81396[#81396] +* Fields API for byte, double, float, integer, long, short {es-pull}81126[#81126] (issue: {es-issue}79105[#79105]) +* Fields API for flattened mapped type {es-pull}82590[#82590] +* Fields API for x-pack `constant_keyword` {es-pull}82292[#82292] +* Fields API for x-pack version, doc version, seq no, murmur3 {es-pull}81476[#81476] +* Improve support for joda datetime to java datetime in Painless {es-pull}83099[#83099] +* Keyword fields API support {es-pull}81266[#81266] +* Make wildcard accessible from the scripting field API {es-pull}82763[#82763] +* Ordinal field data plumbing {es-pull}80970[#80970] (issue: {es-issue}79105[#79105]) +* Support boolean fields in Fields API {es-pull}80043[#80043] (issue: {es-issue}79105[#79105]) +* Time series compile and cache evict metrics {es-pull}79078[#79078] (issue: {es-issue}62899[#62899]) + +Infra/Settings:: +* Optimize duplicated code block in `MetadataUpdateSettingsService` {es-pull}82048[#82048] + +Machine Learning:: +* Add ability to update the truncation option at inference {es-pull}80267[#80267] +* Add error counts to trained model stats {es-pull}82705[#82705] +* Add latest search interval to datafeed stats {es-pull}82620[#82620] (issue: {es-issue}82405[#82405]) +* Adds new MPNet tokenization for NLP models {es-pull}82234[#82234] +* Force delete trained models {es-pull}80595[#80595] +* Improve error message on starting scrolling datafeed with no matching indices {es-pull}81069[#81069] (issue: {es-issue}81013[#81013]) +* Report thread settings per node for trained model deployments {es-pull}81723[#81723] (issue: {es-issue}81149[#81149]) +* Set default value of 30 days for model prune window {es-pull}81377[#81377] +* Track token positions and use source string to tag NER entities {es-pull}81275[#81275] +* Warn when creating job with an unusual bucket span {es-pull}82145[#82145] (issue: {es-issue}81645[#81645]) + +Mapping:: +* Allow doc-values only search on geo_point fields {es-pull}83395[#83395] +* Implement all queries on doc-values only keyword fields {es-pull}83404[#83404] +* Optimize source filtering in `SourceFieldMapper` {es-pull}81970[#81970] (issues: {es-issue}77154[#77154], {es-issue}81575[#81575]) + +Monitoring:: +* Add Enterprise Search monitoring index templates {es-pull}82743[#82743] + +Network:: +* Report close connection exceptions at INFO {es-pull}81768[#81768] (issues: {es-issue}51612[#51612], {es-issue}66473[#66473]) +* Serialize outbound messages on netty buffers {es-pull}80111[#80111] +* Track histogram of transport handling times {es-pull}80581[#80581] (issue: {es-issue}80428[#80428]) + +Recovery:: +* Adjust `indices.recovery.max_bytes_per_sec` according to external settings {es-pull}82819[#82819] + +SQL:: +* Extend Tableau connector to reconnect with catalog
{es-pull}81321[#81321] + +Search:: +* Add `scripted_metric` agg context to `unsigned_long` {es-pull}64422[#64422] (issue: {es-issue}64347[#64347]) +* Add field usage support for vectors {es-pull}80608[#80608] +* Allow doc-values only search on boolean fields {es-pull}82925[#82925] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on date types {es-pull}82602[#82602] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on ip fields {es-pull}82929[#82929] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on keyword fields {es-pull}82846[#82846] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on number types {es-pull}82409[#82409] (issues: {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Rewrite `match` and `match_phrase` queries to `term` queries on `keyword` fields {es-pull}82612[#82612] (issue: {es-issue}82515[#82515]) +* Short cut if reader has point values {es-pull}80268[#80268] +* Support combining `_shards` preference param with `` {es-pull}80024[#80024] (issue: {es-issue}80021[#80021]) + +Security:: +* Activate user profile API {es-pull}82400[#82400] +* Add an initial `ProfileService` for user profiles {es-pull}81899[#81899] +* Add new system index for user profile documents {es-pull}81355[#81355] +* Add update user profile data API {es-pull}82772[#82772] +* Add user profile API for get profile by UID {es-pull}81910[#81910] +* Update Kibana system user privileges {es-pull}82781[#82781] + +Snapshot/Restore:: +* Add Linux x86-64bits native method to retrieve the number of allocated bytes on disk for a file {es-pull}80437[#80437] (issue: {es-issue}79698[#79698]) + +Stats:: +* Add index pressure stats in cluster stats {es-pull}80303[#80303] (issue: {es-issue}79788[#79788]) +* Optimize `getIndices` in `IndicesSegmentResponse` {es-pull}80064[#80064] +* Speed up `MappingStats` Computation on Coordinating Node {es-pull}82830[#82830] + +TSDB:: +* Add `_tsid` field to `time_series` indices {es-pull}80276[#80276] +* Make time boundaries settings required in TSDB indices {es-pull}81146[#81146] + +Transform:: +* Introduce `deduce_mappings` transform setting {es-pull}82256[#82256] (issue: {es-issue}82559[#82559]) +* Make it possible to clear retention policy on an existing transform {es-pull}82703[#82703] (issue: {es-issue}82560[#82560]) +* Report transforms without config as erroneous {es-pull}81141[#81141] (issue: {es-issue}80955[#80955]) + +[[feature-8.1.0]] +[float] +=== New features + +Authentication:: +* Initial version of JWT Realm {es-pull}82175[#82175] +* Introduce domain setting to associate realms {es-pull}81968[#81968] + +Distributed:: +* Add desired nodes API {es-pull}82975[#82975] + +Geo:: +* New `GeoHexGrid` aggregation {es-pull}82924[#82924] + +Health:: +* Model for the new health reporting api {es-pull}83398[#83398] + +TSDB:: +* Handle `fields.with.dots` in `routing_path` {es-pull}83148[#83148] + +Transform:: +* Add transform reset API {es-pull}79828[#79828] (issue: {es-issue}75768[#75768]) + +[[upgrade-8.1.0]] +[float] +=== Upgrades + +Geo:: +* Update vector tiles google protobuf to 3.16.1 {es-pull}83402[#83402] + +Network:: +* Upgrade to Netty 4.1.73 {es-pull}82844[#82844] + +Packaging:: +* Bump bundled JDK to 17.0.2+8 {es-pull}83243[#83243] (issue: {es-issue}83242[#83242]) + + + diff --git 
a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 9e01f7d5c6343..c750abb7c3ba1 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -398,8 +398,8 @@ end::ignore_throttled[] tag::index-ignore-unavailable[] `ignore_unavailable`:: -(Optional, Boolean) If `true`, missing or closed indices are not included in the -response. Defaults to `false`. +(Optional, Boolean) If `false`, the request returns an error if it targets a +missing or closed index. Defaults to `false`. end::index-ignore-unavailable[] tag::include-defaults[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 532b814ad5619..04f014b75054b 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -31,7 +31,8 @@ not be included yet. * <> * <> * <> -* <> +* <> +* <> * <> * <> * <> @@ -72,6 +73,7 @@ include::{es-repo-dir}/ingest/apis/index.asciidoc[] include::info.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{xes-repo-dir}/rest-api/logstash/index.asciidoc[] +include::{es-repo-dir}/ml/common/apis/index.asciidoc[] include::{es-repo-dir}/ml/anomaly-detection/apis/index.asciidoc[] include::{es-repo-dir}/ml/df-analytics/apis/index.asciidoc[] include::{es-repo-dir}/ml/trained-models/apis/index.asciidoc[] diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index e4d533c8378d3..5292b6e8967cb 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -81,6 +81,10 @@ Example response: "available" : true, "enabled" : true }, + "archive" : { + "available" : true, + "enabled" : true + }, "enrich" : { "available" : true, "enabled" : true diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 786a21f576423..13773b02fe417 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -395,6 +395,11 @@ GET /_xpack/usage "aggregate_metric" : { "available" : true, "enabled" : true + }, + "archive" : { + "available" : true, + "enabled" : true, + "indices_count" : 0 } } ------------------------------------------------------------ diff --git a/docs/reference/search/field-caps.asciidoc b/docs/reference/search/field-caps.asciidoc index 7cbb583bab800..580553b027fa2 100644 --- a/docs/reference/search/field-caps.asciidoc +++ b/docs/reference/search/field-caps.asciidoc @@ -77,6 +77,15 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailab (Optional, Boolean) If `true`, unmapped fields are included in the response. Defaults to `false`. +`filters`:: +(Optional, string) Comma-separated list of filters to apply to the response. The +following filters are supported: +metadata,-metadata,-parent,-nested,-multifield + +`types`:: +(Optional, string) Comma-separated list of field types to include. Any fields that +do not match one of these types will be excluded from the results. Defaults to empty, +meaning that all field types are returned. 
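For illustration, here is a minimal sketch of how the two new parameters combine on a field capabilities request, using only the JDK's built-in HTTP client. The index name `my-index` and the unsecured `localhost:9200` endpoint are assumptions for the example; the `_field_caps` endpoint and the `fields`, `filters`, and `types` parameters are the ones documented above.

[source,java]
----
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class FieldCapsFilterExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Ask only for keyword and long fields, excluding metadata fields.
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:9200/my-index/_field_caps"
                + "?fields=*&filters=-metadata&types=keyword,long"))
            .GET()
            .build();
        HttpResponse<String> response =
            client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
----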
+ [[search-field-caps-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/setup/install/check-running.asciidoc b/docs/reference/setup/install/check-running.asciidoc index 7b4c176a27fd6..8dae6f2927b54 100644 --- a/docs/reference/setup/install/check-running.asciidoc +++ b/docs/reference/setup/install/check-running.asciidoc @@ -5,7 +5,7 @@ You can test that your {es} node is running by sending an HTTPS request to port ["source","sh",subs="attributes"] ---- -curl --cacert {os-dir}{slash}certs{slash}http_ca.crt -u elastic https://localhost:9200 <1> +curl --cacert {os-dir}{slash}config{slash}certs{slash}http_ca.crt -u elastic https://localhost:9200 <1> ---- // NOTCONSOLE <1> Ensure that you use `https` in your call, or the request will fail. diff --git a/docs/reference/setup/install/connect-clients.asciidoc b/docs/reference/setup/install/connect-clients.asciidoc index 19c3d556b1d5e..dcc40ac385a99 100644 --- a/docs/reference/setup/install/connect-clients.asciidoc +++ b/docs/reference/setup/install/connect-clients.asciidoc @@ -26,7 +26,7 @@ path is to the auto-generated CA certificate for the HTTP layer. [source,sh] ---- -openssl x509 -fingerprint -sha256 -in config/tls_auto_config_/http_ca.crt +openssl x509 -fingerprint -sha256 -in config/certs/http_ca.crt ---- ``:: The timestamp of when the auto-configuration process created the security files directory. @@ -45,6 +45,6 @@ SHA256 Fingerprint= If your library doesn't support a method of validating the fingerprint, the auto-generated CA certificate is created in the -`config/tls_auto_config_` directory on each {es} node. Copy the +`config/certs` directory on each {es} node. Copy the `http_ca.crt` file to your machine and configure your client to use this -certificate to establish trust when it connects to {es}. \ No newline at end of file +certificate to establish trust when it connects to {es}. diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 43375b63c6619..99bfeeb223391 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -249,7 +249,7 @@ Settings (Windows). Create the following configuration files in a new, empty directory. These files are also available from the -https://github.com/elastic/elasticsearch/tree/master/docs/reference/setup/install[elasticsearch] +https://github.com/elastic/elasticsearch/tree/master/docs/reference/setup/install/docker[elasticsearch] repository on GitHub. -- @@ -276,7 +276,7 @@ referenced by the `docker-compose.yml` file. ["source","txt",subs="attributes"] ---- -include::.env[] +include::docker/.env[] ---- [discrete] @@ -297,7 +297,7 @@ then only be accessible from the host machine itself. 
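To make the client-trust guidance above concrete, here is a minimal sketch, assuming plain JDK APIs only, of establishing trust from a copied `http_ca.crt` file. The file location and the unauthenticated request are assumptions for the example; a client library would normally wrap these steps for you.

[source,java]
----
import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public class TrustHttpCaExample {
    public static void main(String[] args) throws Exception {
        // Load the auto-generated CA certificate copied from the node.
        Certificate ca;
        try (InputStream in = Files.newInputStream(Path.of("http_ca.crt"))) {
            ca = CertificateFactory.getInstance("X.509").generateCertificate(in);
        }
        // Put it into an in-memory trust store and derive an SSLContext from it.
        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
        trustStore.load(null, null);
        trustStore.setCertificateEntry("es-http-ca", ca);
        TrustManagerFactory tmf =
            TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, tmf.getTrustManagers(), null);

        // The TLS handshake now validates against the CA. Credentials are
        // omitted here, so an unauthenticated request will return 401.
        HttpClient client = HttpClient.newBuilder().sslContext(sslContext).build();
        HttpResponse<String> response = client.send(
            HttpRequest.newBuilder(URI.create("https://localhost:9200")).build(),
            HttpResponse.BodyHandlers.ofString()
        );
        System.out.println(response.statusCode());
    }
}
----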
[source,yaml,subs="attributes"] ---- -include::docker-compose.yml[] +include::docker/docker-compose.yml[] ---- endif::[] diff --git a/docs/reference/setup/install/.env b/docs/reference/setup/install/docker/.env similarity index 100% rename from docs/reference/setup/install/.env rename to docs/reference/setup/install/docker/.env diff --git a/docs/reference/setup/install/docker-compose.yml b/docs/reference/setup/install/docker/docker-compose.yml similarity index 95% rename from docs/reference/setup/install/docker-compose.yml rename to docs/reference/setup/install/docker/docker-compose.yml index 3d8b92ffcce58..bb99575d7eefd 100644 --- a/docs/reference/setup/install/docker-compose.yml +++ b/docs/reference/setup/install/docker/docker-compose.yml @@ -2,7 +2,7 @@ version: "2.2" services: setup: - image: docker.elastic.co/elasticsearch/elasticsearch:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs user: "0" @@ -66,7 +66,7 @@ services: depends_on: setup: condition: service_healthy - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata01:/usr/share/elasticsearch/data @@ -109,7 +109,7 @@ services: es02: depends_on: - es01 - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata02:/usr/share/elasticsearch/data @@ -149,7 +149,7 @@ services: es03: depends_on: - es02 - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata03:/usr/share/elasticsearch/data @@ -194,7 +194,7 @@ services: condition: service_healthy es03: condition: service_healthy - image: docker.elastic.co/kibana/kibana:{version} + image: docker.elastic.co/kibana/kibana:${STACK_VERSION} volumes: - certs:/usr/share/kibana/config/certs - kibanadata:/usr/share/kibana/data diff --git a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc index 778bb55a2f267..1ff77c8af886e 100644 --- a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc @@ -55,4 +55,4 @@ fails and returns an error. Defaults to `30s`. `indices`:: (Required, string) A comma-separated list of indices to include in the snapshot. -<> is supported. \ No newline at end of file +<> is supported. \ No newline at end of file diff --git a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc index 2821db95dbb07..3242fd2ee80f0 100644 --- a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc @@ -113,7 +113,7 @@ include::restore-snapshot-api.asciidoc[tag=cluster-state-contents] `indices`:: (Optional, string or array of strings) Comma-separated list of data streams and indices to include in the snapshot. -Supports <>. Defaults to an empty array +Supports <>. Defaults to an empty array (`[]`), which includes all regular data streams and regular indices. To exclude all data streams and indices, use `-*`. + @@ -132,8 +132,9 @@ If `include_global_state` is `true`, the snapshot includes all feature states by default. 
If `include_global_state` is `false`, the snapshot includes no feature states by default. + -To exclude all feature states, regardless of the `include_global_state` value, -specify an empty array (`[]`) or `none`. +Note that specifying an empty array will result in the default behavior. To +exclude all feature states, regardless of the `include_global_state` value, +specify an array with only the value `none` (`["none"]`). `metadata`:: (Optional, object) diff --git a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc index 0b77795540a14..4d578b3df489d 100644 --- a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc @@ -91,7 +91,7 @@ Repository type. Other repository types are available through official plugins: -`hfds`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository] +`hdfs`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository] ==== [[put-snapshot-repo-api-settings-param]] diff --git a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc index e86434ac7a260..5c1b38e779880 100644 --- a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc @@ -182,8 +182,10 @@ state then the restore request will fail. + If `include_global_state` is `true`, the request restores all feature states in the snapshot by default. If `include_global_state` is `false`, the request -restores no feature states by default. To restore no feature states, regardless -of the `include_global_state` value, specify an empty array (`[]`). +restores no feature states by default. Note that specifying an empty array +will result in the default behavior. To restore no feature states, regardless +of the `include_global_state` value, specify an array containing only the value +`none` (`["none"]`). [[restore-snapshot-api-index-settings]] `index_settings`:: diff --git a/docs/reference/snapshot-restore/repository-gcs.asciidoc b/docs/reference/snapshot-restore/repository-gcs.asciidoc index 3a036e1487972..37dfe2add0b18 100644 --- a/docs/reference/snapshot-restore/repository-gcs.asciidoc +++ b/docs/reference/snapshot-restore/repository-gcs.asciidoc @@ -191,6 +191,16 @@ are marked as `Secure`. can be specified explicitly. For example, it can be used to switch between projects when the same credentials are usable for both the production and the development projects. +`proxy.host`:: + Host name of a proxy to connect to the Google Cloud Storage through. + +`proxy.port`:: + Port of a proxy to connect to the Google Cloud Storage through. + +`proxy.type`:: + Proxy type for the client. Supported values are `direct` (no proxy), + `http`, and `socks`. Defaults to `direct`. 
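The three `proxy.*` values map naturally onto the JDK's own proxy abstraction. The sketch below is illustrative only and is not the plugin's internal code; it shows how the documented `proxy.type` values correspond to `java.net.Proxy` instances.

[source,java]
----
import java.net.InetSocketAddress;
import java.net.Proxy;

public class GcsProxyExample {
    // Build a java.net.Proxy from the documented setting values.
    static Proxy buildProxy(String type, String host, int port) {
        return switch (type) {
            case "direct" -> Proxy.NO_PROXY; // no proxy, the default
            case "http" -> new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
            case "socks" -> new Proxy(Proxy.Type.SOCKS, new InetSocketAddress(host, port));
            default -> throw new IllegalArgumentException("unknown proxy type [" + type + "]");
        };
    }

    public static void main(String[] args) {
        System.out.println(buildProxy("http", "proxy.example.com", 8080));
    }
}
----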
+ [[repository-gcs-repository]] ==== Repository settings diff --git a/docs/reference/sql/functions/conditional.asciidoc b/docs/reference/sql/functions/conditional.asciidoc index d7c53e94c4bbf..f70e6de9e6925 100644 --- a/docs/reference/sql/functions/conditional.asciidoc +++ b/docs/reference/sql/functions/conditional.asciidoc @@ -86,10 +86,12 @@ E.g.: for the following query: [source, sql] +---- CASE WHEN a = 1 THEN null WHEN a > 2 THEN 10 WHEN a > 5 THEN 'foo' END +---- an error message would be returned, mentioning that *'foo'* is of data type *keyword*, which does not match the expected data type *integer* (based on result *10*). @@ -105,6 +107,7 @@ interesting than every single value, CASE can create custom buckets as in the following example: [source, sql] +---- SELECT count(*) AS count, CASE WHEN NVL(languages, 0) = 0 THEN 'zero' WHEN languages = 1 THEN 'one' @@ -115,7 +118,7 @@ SELECT count(*) AS count, FROM employees GROUP BY lang_skills ORDER BY lang_skills; - +---- With this query, one can create normal grouping buckets for values _0, 1, 2, 3_ with descriptive names, and every value _>= 4_ falls into the _multilingual_ bucket. @@ -282,7 +285,9 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[iifWithoutDefaultValue] expression. E.g.: [source, sql] +---- IIF(a = 1, 'one', IIF(a = 2, 'two', IIF(a = 3, 'three', 'many'))) +---- ================= diff --git a/docs/reference/sql/language/indices.asciidoc b/docs/reference/sql/language/indices.asciidoc index 4a8fb2325352e..1f986bfea8c70 100644 --- a/docs/reference/sql/language/indices.asciidoc +++ b/docs/reference/sql/language/indices.asciidoc @@ -6,7 +6,7 @@ [[sql-index-patterns-multi]] [discrete] -==== {es} multi-index +==== {es} multi-target syntax The {es} notation for enumerating, including or excluding <> is supported _as long_ as it is quoted or escaped as a table identifier. diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index 383e5b1a08edd..fae4e97ff6eec 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -4,7 +4,7 @@ [discrete] [[large-parsing-trees]] -=== Large queries may throw `ParsingExpection` +=== Large queries may throw `ParsingException` Extremely large queries can consume too much memory during the parsing phase, in which case the {es-sql} engine will abort parsing and throw an error. 
In such cases, consider reducing the query to a smaller size by potentially diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f2ae88..41d9927a4d4fb 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 diff --git a/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java b/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java new file mode 100644 index 0000000000000..5167c5759567c --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.core; + +import java.util.function.Supplier; + +public class MemoizedSupplier<T> implements Supplier<T> { + private Supplier<T> supplier; + private T value; + + public MemoizedSupplier(Supplier<T> supplier) { + this.supplier = supplier; + } + + @Override + public T get() { + if (supplier != null) { + value = supplier.get(); + supplier = null; + } + return value; + } +} diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java deleted file mode 100644 index 1a87920947db1..0000000000000 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ - -package org.elasticsearch.xcontent; - -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.RestApiVersion; - -import java.io.IOException; -import java.nio.CharBuffer; -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; - -public abstract class DelegatingXContentParser implements XContentParser { - - protected abstract XContentParser delegate(); - - @Override - public XContentType contentType() { - return delegate().contentType(); - } - - @Override - public void allowDuplicateKeys(boolean allowDuplicateKeys) { - delegate().allowDuplicateKeys(allowDuplicateKeys); - } - - @Override - public Token nextToken() throws IOException { - return delegate().nextToken(); - } - - @Override - public void skipChildren() throws IOException { - delegate().skipChildren(); - } - - @Override - public Token currentToken() { - return delegate().currentToken(); - } - - @Override - public String currentName() throws IOException { - return delegate().currentName(); - } - - @Override - public Map map() throws IOException { - return delegate().map(); - } - - @Override - public Map mapOrdered() throws IOException { - return delegate().mapOrdered(); - } - - @Override - public Map mapStrings() throws IOException { - return delegate().mapStrings(); - } - - @Override - public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) - throws IOException { - return delegate().map(mapFactory, mapValueParser); - } - - @Override - public List list() throws IOException { - return delegate().list(); - } - - @Override - public List listOrderedMap() throws IOException { - return delegate().listOrderedMap(); - } - - @Override - public String text() throws IOException { - return delegate().text(); - } - - @Override - public String textOrNull() throws IOException { - return delegate().textOrNull(); - } - - @Override - public CharBuffer charBufferOrNull() throws IOException { - return delegate().charBufferOrNull(); - } - - @Override - public CharBuffer charBuffer() throws IOException { - return delegate().charBuffer(); - } - - @Override - public Object objectText() throws IOException { - return delegate().objectText(); - } - - @Override - public Object objectBytes() throws IOException { - return delegate().objectBytes(); - } - - @Override - public boolean hasTextCharacters() { - return delegate().hasTextCharacters(); - } - - @Override - public char[] textCharacters() throws IOException { - return delegate().textCharacters(); - } - - @Override - public int textLength() throws IOException { - return delegate().textLength(); - } - - @Override - public int textOffset() throws IOException { - return delegate().textOffset(); - } - - @Override - public Number numberValue() throws IOException { - return delegate().numberValue(); - } - - @Override - public NumberType numberType() throws IOException { - return delegate().numberType(); - } - - @Override - public short shortValue(boolean coerce) throws IOException { - return delegate().shortValue(coerce); - } - - @Override - public int intValue(boolean coerce) throws IOException { - return delegate().intValue(coerce); - } - - @Override - public long longValue(boolean coerce) throws IOException { - return delegate().longValue(coerce); - } - - @Override - public float floatValue(boolean coerce) throws IOException { - return delegate().floatValue(coerce); - } - - @Override - public double doubleValue(boolean coerce) throws IOException { - return delegate().doubleValue(coerce); - } - - @Override - public short shortValue() throws 
IOException { - return delegate().shortValue(); - } - - @Override - public int intValue() throws IOException { - return delegate().intValue(); - } - - @Override - public long longValue() throws IOException { - return delegate().longValue(); - } - - @Override - public float floatValue() throws IOException { - return delegate().floatValue(); - } - - @Override - public double doubleValue() throws IOException { - return delegate().doubleValue(); - } - - @Override - public boolean isBooleanValue() throws IOException { - return delegate().isBooleanValue(); - } - - @Override - public boolean booleanValue() throws IOException { - return delegate().booleanValue(); - } - - @Override - public byte[] binaryValue() throws IOException { - return delegate().binaryValue(); - } - - @Override - public XContentLocation getTokenLocation() { - return delegate().getTokenLocation(); - } - - @Override - public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return delegate().namedObject(categoryClass, name, context); - } - - @Override - public NamedXContentRegistry getXContentRegistry() { - return delegate().getXContentRegistry(); - } - - @Override - public boolean isClosed() { - return delegate().isClosed(); - } - - @Override - public RestApiVersion getRestApiVersion() { - return delegate().getRestApiVersion(); - } - - @Override - public DeprecationHandler getDeprecationHandler() { - return delegate().getDeprecationHandler(); - } - - @Override - public void close() throws IOException { - delegate().close(); - } -} diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java index f352143979806..cc0f8a13da37e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java @@ -17,9 +17,9 @@ * * A fieldname named {@code "foo.bar.baz":...} will be parsed instead as {@code 'foo':{'bar':{'baz':...}}} */ -public class DotExpandingXContentParser extends FilterXContentParser { +public class DotExpandingXContentParser extends FilterXContentParserWrapper { - private static class WrappingParser extends DelegatingXContentParser { + private static final class WrappingParser extends FilterXContentParser { final Deque parsers = new ArrayDeque<>(); @@ -135,7 +135,7 @@ public Token nextToken() throws IOException { assert expandedTokens < subPaths.length * 2; if (expandedTokens == subPaths.length * 2 - 1) { state = State.PARSING_ORIGINAL_CONTENT; - Token token = in.currentToken(); + Token token = delegate().currentToken(); if (token == Token.START_OBJECT || token == Token.START_ARRAY) { innerLevel++; } @@ -170,7 +170,7 @@ public Token currentToken() { return switch (state) { case EXPANDING_START_OBJECT -> expandedTokens % 2 == 1 ? Token.START_OBJECT : Token.FIELD_NAME; case ENDING_EXPANDED_OBJECT -> Token.END_OBJECT; - case PARSING_ORIGINAL_CONTENT -> in.currentToken(); + case PARSING_ORIGINAL_CONTENT -> delegate().currentToken(); }; } @@ -181,14 +181,14 @@ public String currentName() throws IOException { // whenever we are parsing some inner object/array we can easily delegate to the inner parser // e.g. 
field.with.dots: { obj:{ parsing here } } if (innerLevel > 0) { - return in.currentName(); + return delegate().currentName(); } Token token = currentToken(); // if we are parsing the outer object/array, only at the start object/array we need to return // e.g. dots instead of field.with.dots otherwise we can simply delegate to the inner parser // which will do the right thing if (innerLevel == 0 && token != Token.START_OBJECT && token != Token.START_ARRAY) { - return in.currentName(); + return delegate().currentName(); } // note that innerLevel can be -1 if there are no inner object/array e.g. field.with.dots: value // as well as while there is and we are parsing their END_OBJECT or END_ARRAY @@ -199,7 +199,7 @@ public String currentName() throws IOException { @Override public void skipChildren() throws IOException { if (state == State.EXPANDING_START_OBJECT) { - in.skipChildren(); + delegate().skipChildren(); state = State.ENDING_EXPANDED_OBJECT; } if (state == State.PARSING_ORIGINAL_CONTENT) { @@ -231,7 +231,7 @@ public boolean booleanValue() throws IOException { return super.booleanValue(); } - private static class SingletonValueXContentParser extends FilterXContentParser { + private static class SingletonValueXContentParser extends FilterXContentParserWrapper { protected SingletonValueXContentParser(XContentParser in) { super(in); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java index 877c3daeff636..62c93ba91ef45 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java @@ -18,234 +18,232 @@ import java.util.function.Supplier; /** - * Filters an existing XContentParser by using a delegate + * Delegates every method to the parser returned by the {@link #delegate()} method. + * To be extended directly when the delegated parser may change dynamically. + * Extend {@link FilterXContentParserWrapper} instead when the delegate is fixed and can be provided at construction time.
*/ public abstract class FilterXContentParser implements XContentParser { - protected final XContentParser in; - - protected FilterXContentParser(XContentParser in) { - this.in = in; - } + protected abstract XContentParser delegate(); @Override public XContentType contentType() { - return in.contentType(); + return delegate().contentType(); } @Override public void allowDuplicateKeys(boolean allowDuplicateKeys) { - in.allowDuplicateKeys(allowDuplicateKeys); + delegate().allowDuplicateKeys(allowDuplicateKeys); } @Override public Token nextToken() throws IOException { - return in.nextToken(); + return delegate().nextToken(); } @Override public void skipChildren() throws IOException { - in.skipChildren(); + delegate().skipChildren(); } @Override public Token currentToken() { - return in.currentToken(); + return delegate().currentToken(); } @Override public String currentName() throws IOException { - return in.currentName(); + return delegate().currentName(); } @Override public Map map() throws IOException { - return in.map(); + return delegate().map(); } @Override public Map mapOrdered() throws IOException { - return in.mapOrdered(); + return delegate().mapOrdered(); } @Override public Map mapStrings() throws IOException { - return in.mapStrings(); + return delegate().mapStrings(); } @Override public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) throws IOException { - return in.map(mapFactory, mapValueParser); + return delegate().map(mapFactory, mapValueParser); } @Override public List list() throws IOException { - return in.list(); + return delegate().list(); } @Override public List listOrderedMap() throws IOException { - return in.listOrderedMap(); + return delegate().listOrderedMap(); } @Override public String text() throws IOException { - return in.text(); + return delegate().text(); } @Override public String textOrNull() throws IOException { - return in.textOrNull(); + return delegate().textOrNull(); } @Override public CharBuffer charBufferOrNull() throws IOException { - return in.charBufferOrNull(); + return delegate().charBufferOrNull(); } @Override public CharBuffer charBuffer() throws IOException { - return in.charBuffer(); + return delegate().charBuffer(); } @Override public Object objectText() throws IOException { - return in.objectText(); + return delegate().objectText(); } @Override public Object objectBytes() throws IOException { - return in.objectBytes(); + return delegate().objectBytes(); } @Override public boolean hasTextCharacters() { - return in.hasTextCharacters(); + return delegate().hasTextCharacters(); } @Override public char[] textCharacters() throws IOException { - return in.textCharacters(); + return delegate().textCharacters(); } @Override public int textLength() throws IOException { - return in.textLength(); + return delegate().textLength(); } @Override public int textOffset() throws IOException { - return in.textOffset(); + return delegate().textOffset(); } @Override public Number numberValue() throws IOException { - return in.numberValue(); + return delegate().numberValue(); } @Override public NumberType numberType() throws IOException { - return in.numberType(); + return delegate().numberType(); } @Override public short shortValue(boolean coerce) throws IOException { - return in.shortValue(coerce); + return delegate().shortValue(coerce); } @Override public int intValue(boolean coerce) throws IOException { - return in.intValue(coerce); + return delegate().intValue(coerce); } @Override public long longValue(boolean coerce) throws IOException { - 
return in.longValue(coerce); + return delegate().longValue(coerce); } @Override public float floatValue(boolean coerce) throws IOException { - return in.floatValue(coerce); + return delegate().floatValue(coerce); } @Override public double doubleValue(boolean coerce) throws IOException { - return in.doubleValue(coerce); + return delegate().doubleValue(coerce); } @Override public short shortValue() throws IOException { - return in.shortValue(); + return delegate().shortValue(); } @Override public int intValue() throws IOException { - return in.intValue(); + return delegate().intValue(); } @Override public long longValue() throws IOException { - return in.longValue(); + return delegate().longValue(); } @Override public float floatValue() throws IOException { - return in.floatValue(); + return delegate().floatValue(); } @Override public double doubleValue() throws IOException { - return in.doubleValue(); + return delegate().doubleValue(); } @Override public boolean isBooleanValue() throws IOException { - return in.isBooleanValue(); + return delegate().isBooleanValue(); } @Override public boolean booleanValue() throws IOException { - return in.booleanValue(); + return delegate().booleanValue(); } @Override public byte[] binaryValue() throws IOException { - return in.binaryValue(); + return delegate().binaryValue(); } @Override public XContentLocation getTokenLocation() { - return in.getTokenLocation(); + return delegate().getTokenLocation(); } @Override public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return in.namedObject(categoryClass, name, context); + return delegate().namedObject(categoryClass, name, context); } @Override public NamedXContentRegistry getXContentRegistry() { - return in.getXContentRegistry(); + return delegate().getXContentRegistry(); } @Override public boolean isClosed() { - return in.isClosed(); + return delegate().isClosed(); } @Override public void close() throws IOException { - in.close(); + delegate().close(); } @Override public RestApiVersion getRestApiVersion() { - return in.getRestApiVersion(); + return delegate().getRestApiVersion(); } @Override public DeprecationHandler getDeprecationHandler() { - return in.getDeprecationHandler(); + return delegate().getDeprecationHandler(); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java new file mode 100644 index 0000000000000..8436d073155e9 --- /dev/null +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.xcontent; + +/** + * Wraps the provided {@link XContentParser} and delegates to it. 
+ */ +public class FilterXContentParserWrapper extends FilterXContentParser { + private final XContentParser delegate; + + public FilterXContentParserWrapper(XContentParser delegate) { + this.delegate = delegate; + } + + @Override + protected final XContentParser delegate() { + return delegate; + } +} diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java index a78bdf159144b..18c12bc5b918e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java @@ -26,6 +26,12 @@ public interface XContent { byte streamSeparator(); + @Deprecated + boolean detectContent(byte[] bytes, int offset, int length); + + @Deprecated + boolean detectContent(CharSequence chars); + /** * Creates a new generator using the provided output stream. */ diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java index cca70b2f1a00c..b7c98837678cd 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java @@ -8,9 +8,6 @@ package org.elasticsearch.xcontent; -import com.fasterxml.jackson.dataformat.cbor.CBORConstants; -import com.fasterxml.jackson.dataformat.smile.SmileConstants; - import org.elasticsearch.xcontent.cbor.CborXContent; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.smile.SmileXContent; @@ -142,22 +139,20 @@ public static XContentType xContentType(CharSequence content) { return null; } char first = content.charAt(0); - if (first == '{') { + if (JsonXContent.jsonXContent.detectContent(content)) { return XContentType.JSON; } // Should we throw a failure here? Smile idea is to use it in bytes.... 
- if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && content.charAt(1) == SmileConstants.HEADER_BYTE_2 - && content.charAt(2) == SmileConstants.HEADER_BYTE_3) { + if (SmileXContent.smileXContent.detectContent(content)) { return XContentType.SMILE; } - if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') { + if (YamlXContent.yamlXContent.detectContent(content)) { return XContentType.YAML; } // CBOR is not supported + // fallback for JSON for (int i = 0; i < length; i++) { char c = content.charAt(i); if (c == '{') { @@ -287,34 +282,20 @@ public static XContentType xContentType(byte[] bytes, int offset, int length) { return null; } byte first = bytes[offset]; - if (first == '{') { + if (JsonXContent.jsonXContent.detectContent(bytes, offset, length)) { return XContentType.JSON; } - if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 - && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { + if (SmileXContent.smileXContent.detectContent(bytes, offset, length)) { return XContentType.SMILE; } if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') { return XContentType.YAML; } - // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { - return XContentType.CBOR; - } - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) { - // Actually, specific "self-describe tag" is a very good indicator - if (first == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { - return XContentType.CBOR; - } - } - // for small objects, some encoders just encode as major type object, we can safely - // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { + if (CborXContent.cborXContent.detectContent(bytes, offset, length)) { return XContentType.CBOR; } + // fallback for JSON int jsonStart = 0; // JSON may be preceded by UTF-8 BOM if (length > 3 && first == (byte) 0xEF && bytes[offset + 1] == (byte) 0xBB && bytes[offset + 2] == (byte) 0xBF) { diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java index 23285167cc750..851263d18d5d7 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java @@ -8,14 +8,7 @@ package org.elasticsearch.xcontent; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.RestApiVersion; - import java.io.IOException; -import java.nio.CharBuffer; -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; /** * Wrapper for a XContentParser that makes a single object/array look like a complete document. @@ -24,34 +17,23 @@ * as skipping to the end of the object in case of a parsing error. The wrapper is intended to be * used for parsing objects that should be ignored if they are malformed. 
*/ -public class XContentSubParser implements XContentParser { +public class XContentSubParser extends FilterXContentParserWrapper { - private final XContentParser parser; private int level; private boolean closed; public XContentSubParser(XContentParser parser) { - this.parser = parser; + super(parser); if (parser.currentToken() != Token.START_OBJECT && parser.currentToken() != Token.START_ARRAY) { throw new IllegalStateException("The sub parser has to be created on the start of an object or array"); } level = 1; } - @Override - public XContentType contentType() { - return parser.contentType(); - } - - @Override - public void allowDuplicateKeys(boolean allowDuplicateKeys) { - parser.allowDuplicateKeys(allowDuplicateKeys); - } - @Override public Token nextToken() throws IOException { if (level > 0) { - Token token = parser.nextToken(); + Token token = super.nextToken(); if (token == Token.START_OBJECT || token == Token.START_ARRAY) { level++; } else if (token == Token.END_OBJECT || token == Token.END_ARRAY) { @@ -65,7 +47,7 @@ public Token nextToken() throws IOException { @Override public void skipChildren() throws IOException { - Token token = parser.currentToken(); + Token token = currentToken(); if (token != Token.START_OBJECT && token != Token.START_ARRAY) { // skip if not starting on an object or an array return; @@ -78,202 +60,11 @@ public void skipChildren() throws IOException { } } - @Override - public Token currentToken() { - return parser.currentToken(); - } - - @Override - public String currentName() throws IOException { - return parser.currentName(); - } - - @Override - public Map map() throws IOException { - return parser.map(); - } - - @Override - public Map mapOrdered() throws IOException { - return parser.mapOrdered(); - } - - @Override - public Map mapStrings() throws IOException { - return parser.mapStrings(); - } - - @Override - public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) - throws IOException { - return parser.map(mapFactory, mapValueParser); - } - - @Override - public List list() throws IOException { - return parser.list(); - } - - @Override - public List listOrderedMap() throws IOException { - return parser.listOrderedMap(); - } - - @Override - public String text() throws IOException { - return parser.text(); - } - - @Override - public String textOrNull() throws IOException { - return parser.textOrNull(); - } - - @Override - public CharBuffer charBufferOrNull() throws IOException { - return parser.charBufferOrNull(); - } - - @Override - public CharBuffer charBuffer() throws IOException { - return parser.charBuffer(); - } - - @Override - public Object objectText() throws IOException { - return parser.objectText(); - } - - @Override - public Object objectBytes() throws IOException { - return parser.objectBytes(); - } - - @Override - public boolean hasTextCharacters() { - return parser.hasTextCharacters(); - } - - @Override - public char[] textCharacters() throws IOException { - return parser.textCharacters(); - } - - @Override - public int textLength() throws IOException { - return parser.textLength(); - } - - @Override - public int textOffset() throws IOException { - return parser.textOffset(); - } - - @Override - public Number numberValue() throws IOException { - return parser.numberValue(); - } - - @Override - public NumberType numberType() throws IOException { - return parser.numberType(); - } - - @Override - public short shortValue(boolean coerce) throws IOException { - return parser.shortValue(coerce); - } - - @Override - public int 
intValue(boolean coerce) throws IOException { - return parser.intValue(coerce); - } - - @Override - public long longValue(boolean coerce) throws IOException { - return parser.longValue(coerce); - } - - @Override - public float floatValue(boolean coerce) throws IOException { - return parser.floatValue(coerce); - } - - @Override - public double doubleValue(boolean coerce) throws IOException { - return parser.doubleValue(); - } - - @Override - public short shortValue() throws IOException { - return parser.shortValue(); - } - - @Override - public int intValue() throws IOException { - return parser.intValue(); - } - - @Override - public long longValue() throws IOException { - return parser.longValue(); - } - - @Override - public float floatValue() throws IOException { - return parser.floatValue(); - } - - @Override - public double doubleValue() throws IOException { - return parser.doubleValue(); - } - - @Override - public boolean isBooleanValue() throws IOException { - return parser.isBooleanValue(); - } - - @Override - public boolean booleanValue() throws IOException { - return parser.booleanValue(); - } - - @Override - public byte[] binaryValue() throws IOException { - return parser.binaryValue(); - } - - @Override - public XContentLocation getTokenLocation() { - return parser.getTokenLocation(); - } - - @Override - public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return parser.namedObject(categoryClass, name, context); - } - - @Override - public NamedXContentRegistry getXContentRegistry() { - return parser.getXContentRegistry(); - } - @Override public boolean isClosed() { return closed; } - @Override - public RestApiVersion getRestApiVersion() { - return parser.getRestApiVersion(); - } - - @Override - public DeprecationHandler getDeprecationHandler() { - return parser.getDeprecationHandler(); - } - @Override public void close() throws IOException { if (closed == false) { diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java index 90c88b979514e..fbea0aab10019 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.xcontent.XContent; @@ -60,6 +61,31 @@ public byte streamSeparator() { throw new XContentParseException("cbor does not support stream parsing..."); } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + // CBOR logic similar to CBORFactory#hasCBORFormat + if (bytes[offset] == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { + return true; + } + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, bytes[offset]) && length > 2) { + // Actually, specific "self-describe tag" is a very good indicator + if (bytes[offset] == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { + return true; + } + } + // for small objects, some encoders just encode as major type object, we can safely + // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, 
bytes[offset])) { + return true; + } + return false; + } + + @Override + public boolean detectContent(CharSequence chars) { + return false; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new CborXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java index b5542a3e1890e..d543c39ae7fcf 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java @@ -62,6 +62,16 @@ public byte streamSeparator() { return '\n'; } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return bytes[offset] == '{'; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.charAt(0) == '{'; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java index b789e691030cb..9d1a41b264ed7 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.dataformat.smile.SmileConstants; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; @@ -62,6 +63,22 @@ public byte streamSeparator() { return (byte) 0xFF; } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return length > 2 + && bytes[offset] == SmileConstants.HEADER_BYTE_1 + && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 + && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.length() > 2 + && chars.charAt(0) == SmileConstants.HEADER_BYTE_1 + && chars.charAt(1) == SmileConstants.HEADER_BYTE_2 + && chars.charAt(2) == SmileConstants.HEADER_BYTE_3; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java index 1250c4cae6bc2..bcfd214506ba8 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java @@ -11,14 +11,12 @@ import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentLocation; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import 
java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.CharBuffer; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -28,23 +26,10 @@ */ public class MapXContentParser extends AbstractXContentParser { - private XContentType xContentType; + private final XContentType xContentType; private TokenIterator iterator; private boolean closed; - public static XContentParser wrapObject(Object sourceMap) throws IOException { - XContentParser parser = new MapXContentParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.IGNORE_DEPRECATIONS, - Collections.singletonMap("dummy_field", sourceMap), - XContentType.JSON - ); - parser.nextToken(); // start object - parser.nextToken(); // field name - parser.nextToken(); // field value - return parser; - } - public MapXContentParser( NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java index 971bb6a28ebb7..8dd552e49af0f 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java @@ -55,6 +55,16 @@ public byte streamSeparator() { throw new UnsupportedOperationException("yaml does not support stream parsing..."); } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return length > 2 && bytes[offset] == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-'; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.length() > 2 && chars.charAt(0) == '-' && chars.charAt(1) == '-' && chars.charAt(2) == '-'; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new YamlXContentGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java index 924f925d3b6da..08d34a37d1fd6 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,9 @@ public String getWriteableName() { /** get the number of documents */ @Override public long getDocCount() { + if (results != null) { + return results.getDocCount(); + } if (stats == null) { return 0; } @@ -241,6 +245,17 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalMatrixStats(name, runningStats.docCount, runningStats, null, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalMatrixStats( + name, + samplingContext.inverseScale(getDocCount()), + stats, 
+ new MatrixStatsResults(stats, samplingContext), + getMetadata() + ); + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java index dfe395df936b0..c4c476ff65e93 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java @@ -46,6 +46,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new MatrixStatsAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java index 563001d4cc9f3..58b14dc390f44 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.io.IOException; import java.util.Collections; @@ -41,6 +42,18 @@ class MatrixStatsResults implements Writeable { this.compute(); } + /** creates and computes the result from the provided stats, scaling as necessary given the sampling context */ + MatrixStatsResults(RunningStats stats, SamplingContext samplingContext) { + this.results = stats.clone(); + this.correlation = new HashMap<>(); + this.compute(); + // Note: it is important to scale counts AFTER compute as scaling before could introduce bias + this.results.docCount = samplingContext.inverseScale(this.results.docCount); + for (String field : this.results.counts.keySet()) { + this.results.counts.computeIfPresent(field, (k, v) -> samplingContext.inverseScale(v)); + } + } + /** creates a results object from the given stream */ @SuppressWarnings("unchecked") protected MatrixStatsResults(StreamInput in) { diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml index 3fa6c87869234..0f53b03a84b70 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml +++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml @@ -16,13 +16,13 @@ - do: index: index: empty_bucket_idx - id: 1 + id: "1" body: { "value": 0, "val1": 3.1 } - do: index: index: empty_bucket_idx - id: 2 + id: "2" body: { "value": 2, "val1": -3.1 } - do: diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml 
b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml index 77e8bf6359f22..ac0c6eef0cf83 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml +++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml @@ -27,77 +27,77 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "val1": 1.9, "val2": 3.1, "val3": 2.3 } - do: index: index: test - id: 2 + id: "2" body: { "val1": -5.2, "val2": -3.4, "val3": 2.3} - do: index: index: test - id: 3 + id: "3" body: { "val1": -5.2, "val3": 2.3} - do: index: index: test - id: 4 + id: "4" body: { "val1": 18.3, "val2": 104.4, "val3": 2.3} - do: index: index: test - id: 5 + id: "5" body: { "val1": -53.2, "val2": -322.4, "val3": 2.3} - do: index: index: test - id: 6 + id: "6" body: { "val1": -578.9, "val2": 69.9, "val3": 2.3} - do: index: index: test - id: 7 + id: "7" body: { "val1": 16.2, "val2": 17.2, "val3": 2.3} - do: index: index: test - id: 8 + id: "8" body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3} - do: index: index: test - id: 9 + id: "9" body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3} - do: index: index: test - id: 10 + id: "10" body: { "val1": 782.7, "val2": 789.7, "val3": 2.3} - do: index: index: test - id: 11 + id: "11" body: { "val1": -1.2, "val2": 6.3, "val3": 2.3} - do: index: index: test - id: 12 + id: "12" body: { "val1": 0, "val2": 1.11, "val3": 2.3} - do: index: index: test - id: 13 + id: "13" body: { "val1": 0.1, "val2": 0.92, "val3": 2.3} - do: index: index: test - id: 14 + id: "14" body: { "val1": 0.12, "val2": -82.4, "val3": 2.3} - do: index: index: test - id: 15 + id: "15" body: { "val1": 98.2, "val2": 32.4, "val3": 2.3} - do: diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml index 467efce78a467..295ac2160f23c 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml +++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml @@ -27,77 +27,77 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "val1": 1.9, "val2": 3.1, "val3": 2.3, "vals" : [1.9, 16.143] } - do: index: index: test - id: 2 + id: "2" body: { "val1": -5.2, "val2": -3.4, "val3": 2.3, "vals" : [155, 16.23]} - do: index: index: test - id: 3 + id: "3" body: { "val1": -5.2, "val3": 2.3, "vals" : [-455, -32.32]} - do: index: index: test - id: 4 + id: "4" body: { "val1": 18.3, "val2": 104.4, "val3": 2.3, "vals" : [0.14, 92.1]} - do: index: index: test - id: 5 + id: "5" body: { "val1": -53.2, "val2": -322.4, "val3": 2.3, "vals" : [16, 16]} - do: index: index: test - id: 6 + id: "6" body: { "val1": -578.9, "val2": 69.9, "val3": 2.3} - do: index: index: test - id: 7 + id: "7" body: { "val1": 16.2, "val2": 17.2, "val3": 2.3, "vals" : [1234.3, -3433]} - do: index: index: test - id: 8 + id: "8" body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3, "vals" : [177.2, -93.333]} - do: index: index: test - id: 9 + id: "9" body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3, "vals" : [-29.9, 163.0]} - do: index: index: test - id: 10 + id: "10" body: { "val1": 782.7, "val2": 789.7, "val3": 2.3, "vals" : [-0.2, 1343.3]} - do: index: index: test - id: 11 + id: "11" body: { "val1": -1.2, "val2": 6.3, "val3": 2.3, "vals" : 
[15.3, 16.9]} - do: index: index: test - id: 12 + id: "12" body: { "val1": 0, "val2": 1.11, "val3": 2.3, "vals" : [-644.4, -644.4]} - do: index: index: test - id: 13 + id: "13" body: { "val1": 0.1, "val2": 0.92, "val3": 2.3, "vals" : [73.2, 0.12]} - do: index: index: test - id: 14 + id: "14" body: { "val1": 0.12, "val2": -82.4, "val3": 2.3, "vals" : [-0.001, 1295.3]} - do: index: index: test - id: 15 + id: "15" body: { "val1": 98.2, "val2": 32.4, "val3": 2.3, "vals" : [15.5, 16.5]} - do: diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml index 6609eb831b226..543a806b92153 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml @@ -33,7 +33,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "the fox runs across the street" } refresh: true @@ -51,7 +51,7 @@ - do: index: index: test - id: 2 + id: "2" body: { "text": "run fox run" } refresh: true diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml index 495932016966d..3993c94937f37 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml @@ -25,7 +25,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "foo bar baz" } refresh: true @@ -71,7 +71,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "origin": "C.A1234.5678" } refresh: true diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml index 674a6ab438069..5f4545e12e0e1 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml @@ -43,7 +43,7 @@ - do: index: index: test - id: 1 + id: "1" refresh: true body: name: logicacmg ehemals avinci - the know how company diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml index 4ba16007664f1..1d308d766f514 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml @@ -14,7 +14,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: @@ -41,7 +41,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:bars analyzer: snowball diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml index 6b373d41e8e12..5839d35b44c5a 100644 --- 
a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: body: Ich lese die Bücher diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml index dca56565e6954..083251fe782fc 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml @@ -26,7 +26,7 @@ - do: index: index: test - id: 3 + id: "3" body: field1: quick lazy huge brown pidgin field2: the quick lazy huge brown fox jumps over the tree @@ -34,14 +34,14 @@ - do: index: index: test - id: 1 + id: "1" body: field1: the quick brown fox - do: index: index: test - id: 2 + id: "2" body: field1: the quick lazy huge brown fox jumps over the tree refresh: true @@ -133,7 +133,7 @@ - do: index: index: test - id: 1 + id: "1" body: text: quick brown fox refresh: true @@ -174,7 +174,7 @@ - do: index: index: test - id: 2 + id: "2" body: text: fast brown fox refresh: true diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml index ae039e453be6c..58c7df39b65b5 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml @@ -34,41 +34,41 @@ setup: - do: index: index: test - id: 1 + id: "1" body: text: say wtf happened foo - do: index: index: test - id: 2 + id: "2" body: text: bar baz what the fudge man - do: index: index: test - id: 3 + id: "3" body: text: wtf - do: index: index: test - id: 4 + id: "4" body: text: what is the name for fudge - do: index: index: test - id: 5 + id: "5" body: text: bar two three - do: index: index: test - id: 6 + id: "6" body: text: bar baz two three refresh: true @@ -180,14 +180,14 @@ setup: - do: index: index: test - id: 7 + id: "7" body: text: "WTFD!" 
- do: index: index: test - id: 8 + id: "8" body: text: "Weird Al's WHAT THE FUDGESICLE" refresh: true @@ -214,7 +214,7 @@ setup: - do: index: index: test - id: 9 + id: "9" body: phrase_field: "bar baz" refresh: true diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml index 8b67abb193aa0..72539dfd0b618 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml @@ -24,7 +24,7 @@ - do: index: index: test - id: 1 + id: "1" body: field: input: [ "Foo Fighters" ] diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml index cba4370943206..85e57379bdffe 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml @@ -23,7 +23,7 @@ - do: index: index: test - id: 1 + id: "1" refresh: true body: text: The quick brown fox is brown. @@ -31,7 +31,7 @@ - do: termvectors: index: test - id: 1 + id: "1" payloads: true - match: {term_vectors.text.field_statistics.sum_doc_freq: 5} - match: {term_vectors.text.terms.brown.tokens.0.payload: PEFMUEhBTlVNPg==} diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index 9951e2a3e1f54..a8afad52077f4 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.test-with-dependencies' @@ -30,9 +31,7 @@ testClusters.configureEach { setting 'xpack.security.enabled', 'true' keystore 'bootstrap.password', 'x-pack-test-password' user username: "x_pack_rest_user", password: "x-pack-test-password" - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } testClusters.matching { it.name == "javaRestTest" }.configureEach { @@ -40,9 +39,7 @@ testClusters.matching { it.name == "javaRestTest" }.configureEach { setting 'xpack.security.enabled', 'false' // disable ILM history, since it disturbs tests using _all setting 'indices.lifecycle.history_index_enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ @@ -51,3 +48,9 @@ if (BuildParams.inFipsJvm){ tasks.named("javaRestTest").configure{enabled = false } tasks.named("yamlRestTest").configure{enabled = false } } + +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("internalClusterTest").configure { + systemProperty 'es.index_mode_feature_flag_registered', 'true' + } +} diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 7b2f8039d1942..be6165026fe46 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; -import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; @@ -24,14 +23,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; -import org.junit.After; import java.time.Instant; import java.util.Collection; import java.util.List; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; public class TSDBIndexingIT extends ESSingleNodeTestCase { @@ -68,12 +65,6 @@ protected Settings nodeSettings() { return newSettings.build(); } - @After - public void cleanup() { - DeleteDataStreamAction.Request deleteDataStreamsRequest = new DeleteDataStreamAction.Request("*"); - assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest).actionGet()); - } - public void testTimeRanges() throws Exception { var mappingTemplate = """ { diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java index 14ea7ddc3793d..b3a1629176770 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams; import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.rest.ESRestTestCase; @@ -15,16 +16,19 @@ import java.io.IOException; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Map; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class TsdbDataStreamRestIT extends ESRestTestCase { @@ -84,6 +88,57 @@ public class TsdbDataStreamRestIT extends ESRestTestCase { } }"""; + private static final String NON_TSDB_TEMPLATE = """ + { + "index_patterns": ["k8s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 2 + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword" + }, + "k8s": { + "properties": { + "pod": { + "properties": { + "uid": { + "type": "keyword" 
+ }, + "name": { + "type": "keyword" + }, + "ip": { + "type": "ip" + }, + "network": { + "properties": { + "tx": { + "type": "long" + }, + "rx": { + "type": "long" + } + } + } + } + } + } + } + } + } + }, + "data_stream": {} + }"""; + private static final String DOC = """ { "@timestamp": "$time", @@ -235,6 +290,82 @@ public void testSubsequentRollovers() throws Exception { } } + public void testMigrateRegularDataStreamToTsdbDataStream() throws Exception { + // Create a non tsdb template + var putComposableIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putComposableIndexTemplateRequest.setJsonEntity(NON_TSDB_TEMPLATE); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + // Index a few docs and sometimes rollover + int numRollovers = 4; + int numDocs = 32; + var currentTime = Instant.now(); + var currentMinus30Days = currentTime.minus(30, ChronoUnit.DAYS); + for (int i = 0; i < numRollovers; i++) { + for (int j = 0; j < numDocs; j++) { + var indexRequest = new Request("POST", "/k8s/_doc"); + var time = Instant.ofEpochMilli(randomLongBetween(currentMinus30Days.toEpochMilli(), currentTime.toEpochMilli())); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(time))); + var response = client().performRequest(indexRequest); + assertOK(response); + var responseBody = entityAsMap(response); + // i rollovers and +1 offset: + assertThat((String) responseBody.get("_index"), backingIndexEqualTo("k8s", i + 1)); + } + var rolloverRequest = new Request("POST", "/k8s/_rollover"); + var rolloverResponse = client().performRequest(rolloverRequest); + assertOK(rolloverResponse); + var rolloverResponseBody = entityAsMap(rolloverResponse); + assertThat(rolloverResponseBody.get("rolled_over"), is(true)); + } + + var getDataStreamsRequest = new Request("GET", "/_data_stream"); + var getDataStreamResponse = client().performRequest(getDataStreamsRequest); + assertOK(getDataStreamResponse); + var dataStreams = entityAsMap(getDataStreamResponse); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo("k8s")); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(5)); + for (int i = 0; i < 5; i++) { + String backingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices." 
+ i + ".index_name"); + assertThat(backingIndex, backingIndexEqualTo("k8s", i + 1)); + var indices = getIndex(backingIndex); + var escapedBackingIndex = backingIndex.replace(".", "\\."); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo("k8s")); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), nullValue()); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"), nullValue()); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"), nullValue()); + } + + // Update template + putComposableIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putComposableIndexTemplateRequest.setJsonEntity(TEMPLATE); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + var rolloverRequest = new Request("POST", "/k8s/_rollover"); + var rolloverResponse = client().performRequest(rolloverRequest); + assertOK(rolloverResponse); + var rolloverResponseBody = entityAsMap(rolloverResponse); + assertThat(rolloverResponseBody.get("rolled_over"), is(true)); + var newIndex = (String) rolloverResponseBody.get("new_index"); + assertThat(newIndex, backingIndexEqualTo("k8s", 6)); + + // Ingest documents that will land in the new tsdb backing index: + for (int i = 0; i < numDocs; i++) { + var indexRequest = new Request("POST", "/k8s/_doc"); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(currentTime))); + var response = client().performRequest(indexRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat((String) responseBody.get("_index"), backingIndexEqualTo("k8s", 6)); + } + + // Fail if documents target older non tsdb backing index: + var indexRequest = new Request("POST", "/k8s/_doc"); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(currentMinus30Days))); + var e = expectThrows(ResponseException.class, () -> client().performRequest(indexRequest)); + assertThat(e.getMessage(), containsString("is outside of ranges of currently writable indices")); + } + private static Map getIndex(String indexName) throws IOException { var getIndexRequest = new Request("GET", "/" + indexName + "?human"); var response = client().performRequest(getIndexRequest); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 41126f6d9f0df..531dcc1ac9a19 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -36,8 +36,16 @@ public Settings getAdditionalIndexSettings( ) { if (dataStreamName != null) { DataStream dataStream = metadata.dataStreams().get(dataStreamName); + // First backing index is created and then data stream is rolled over (in a single cluster state update). 
+ // So at this point we can't check index_mode==time_series; instead we check + // that the data stream's index_mode is null or standard and that templateIndexMode == TIME_SERIES. + boolean migrating = dataStream != null + && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) + && templateIndexMode == IndexMode.TIME_SERIES; IndexMode indexMode; - if (dataStream != null) { + if (migrating) { + indexMode = IndexMode.TIME_SERIES; + } else if (dataStream != null) { + indexMode = dataStream.getIndexMode(); + } else { + indexMode = templateIndexMode; @@ -50,7 +58,7 @@ public Settings getAdditionalIndexSettings( TimeValue lookAheadTime = IndexSettings.LOOK_AHEAD_TIME.get(allSettings); final Instant start; final Instant end; - if (dataStream == null) { + if (dataStream == null || migrating) { start = resolvedAt.minusMillis(lookAheadTime.getMillis()); end = resolvedAt.plusMillis(lookAheadTime.getMillis()); } else { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index 84c897f4717be..d7d2652481534 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -44,6 +44,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; public class MetadataDataStreamRolloverServiceTests extends ESTestCase { @@ -63,7 +64,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { IndexMode.TIME_SERIES ); ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) - .template(new Template(Settings.builder().put("index.mode", "time_series").build(), null, null)) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, IndexMode.TIME_SERIES)) .build(); Metadata.Builder builder = Metadata.builder(); @@ -75,6 +76,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { .put("index.hidden", true) .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()) .put("index.mode", "time_series") + .put("index.routing_path", "uid") .put("index.time_series.start_time", FORMATTER.format(now.minus(4, ChronoUnit.HOURS))) .put("index.time_series.end_time", FORMATTER.format(now.minus(2, ChronoUnit.HOURS))) ) @@ -144,4 +146,183 @@ public void testRolloverClusterStateForDataStream() throws Exception { } } + public void testRolloverAndMigrateDataStream() throws Exception { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + String dataStreamName = "logs-my-app"; + IndexMode dsIndexMode = randomBoolean() ?
null : IndexMode.STANDARD; + final DataStream dataStream = new DataStream( + dataStreamName, + new DataStream.TimestampField("@timestamp"), + List.of(new Index(DataStream.getDefaultBackingIndexName(dataStreamName, 1, now.toEpochMilli()), "uuid")), + 1, + null, + false, + false, + false, + false, + dsIndexMode + ); + ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, IndexMode.TIME_SERIES)) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + Settings.Builder indexSettings = ESTestCase.settings(Version.CURRENT) + .put("index.hidden", true) + .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()); + if (dsIndexMode != null) { + indexSettings.put("index.mode", dsIndexMode.getName()); + } + builder.put( + IndexMetadata.builder(dataStream.getWriteIndex().getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0) + ); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(new DataStreamIndexSettingsProvider()), + xContentRegistry() + ); + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + now, + randomBoolean(), + false + ); + + String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); + String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + 1, rolloverMetadata.indices().size()); + + // Assert data stream's index_mode has been changed to time_series. 
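+ // (the template only declared TIME_SERIES; the rollover itself flipped the data stream's mode)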
+ assertThat(rolloverMetadata.dataStreams().get(dataStreamName), notNullValue()); + assertThat(rolloverMetadata.dataStreams().get(dataStreamName).getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + + // Nothing changed for the original backing index: + IndexMetadata im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(0)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.STANDARD)); + assertThat(IndexSettings.TIME_SERIES_START_TIME.exists(im.getSettings()), is(false)); + assertThat(IndexSettings.TIME_SERIES_END_TIME.exists(im.getSettings()), is(false)); + // New backing index is a tsdb index: + im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(1)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.TIME_SERIES)); + Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.plus(2, ChronoUnit.HOURS))); + } finally { + testThreadPool.shutdown(); + } + } + + public void testChangingIndexModeFromTimeSeriesToSomethingElseNoEffectOnExistingDataStreams() throws Exception { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + String dataStreamName = "logs-my-app"; + final DataStream dataStream = new DataStream( + dataStreamName, + new DataStream.TimestampField("@timestamp"), + List.of(new Index(DataStream.getDefaultBackingIndexName(dataStreamName, 1, now.toEpochMilli()), "uuid")), + 1, + null, + false, + false, + false, + false, + IndexMode.TIME_SERIES + ); + ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, randomBoolean() ? 
IndexMode.STANDARD : null)) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + builder.put( + IndexMetadata.builder(dataStream.getWriteIndex().getName()) + .settings( + ESTestCase.settings(Version.CURRENT) + .put("index.hidden", true) + .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()) + .put("index.mode", "time_series") + .put("index.routing_path", "uid") + .put("index.time_series.start_time", FORMATTER.format(now.minus(4, ChronoUnit.HOURS))) + .put("index.time_series.end_time", FORMATTER.format(now.minus(2, ChronoUnit.HOURS))) + ) + .numberOfShards(1) + .numberOfReplicas(0) + ); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(new DataStreamIndexSettingsProvider()), + xContentRegistry() + ); + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + now, + randomBoolean(), + false + ); + + String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); + String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + 1, rolloverMetadata.indices().size()); + + // Assert data stream's index_mode remains time_series. 
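+ // (reverting the template to standard/null must not downgrade an existing tsdb data stream)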
+ assertThat(rolloverMetadata.dataStreams().get(dataStreamName), notNullValue()); + assertThat(rolloverMetadata.dataStreams().get(dataStreamName).getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + + // Nothing changed for the original tsdb backing index: + IndexMetadata im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(0)); + assertThat(IndexSettings.MODE.exists(im.getSettings()), is(true)); + Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(4, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + // New backing index is also a tsdb index: + im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(1)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.TIME_SERIES)); + startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.plus(2, ChronoUnit.HOURS))); + } finally { + testThreadPool.shutdown(); + } + } + } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml index 29e2ec83cab19..8b76faf6c44c2 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml @@ -22,7 +22,7 @@ - do: index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: foo: bar diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml index b8323d2276395..027b0b1f94050 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml @@ -22,7 +22,7 @@ - do: index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { "number": 4, '@timestamp': '2020-12-12' } @@ -39,7 +39,7 @@ - do: index: index: simple-data-stream1 - id: 2 + id: "2" op_type: create body: { "number": 1, '@timestamp': '2020-12-12' } @@ -56,7 +56,7 @@ - do: index: index: simple-data-stream1 - id: 3 + id: "3" op_type: create body: { "number": 5, '@timestamp': '2020-12-12' } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml index a3461b5c2c1db..32eb908331026 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml @@ -227,7 +227,7 @@ - '@timestamp': '2020-12-12' - delete: _index: logs-foobar - _id: 10 + _id: "10" - match: { errors: true } - match: { items.0.index.status: 400 } - match: { 
items.0.index.error.type: illegal_argument_exception } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index e6baafa3a9750..031ed9cf86bf7 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -102,6 +102,7 @@ public IngestDocument execute(IngestDocument ingestDocument) { for (Function, Function> dateParser : dateParsers) { try { dateTime = dateParser.apply(ingestDocument.getSourceAndMetadata()).apply(value); + break; } catch (Exception e) { // try the next parser and keep track of the exceptions lastException = ExceptionsHelper.useOrSuppress(lastException, e); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 2beba89adfd1a..9cc376fc379b8 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -100,6 +100,28 @@ public void testJavaPatternMultipleFormats() { } } + public void testShortCircuitAdditionalPatternsAfterFirstMatchingPattern() { + List matchFormats = new ArrayList<>(); + matchFormats.add("invalid"); + matchFormats.add("uuuu-dd-MM"); + matchFormats.add("uuuu-MM-dd"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + matchFormats, + "date_as_date" + ); + + Map document = new HashMap<>(); + document.put("date_as_string", "2010-03-04"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-04-03T00:00:00.000+02:00")); + } + public void testJavaPatternNoTimezone() { DateProcessor dateProcessor = new DateProcessor( randomAlphaOfLength(10), diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml index 80598adf5f567..fe880181b5896 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml @@ -27,7 +27,7 @@ teardown: - do: index: index: events - id: 1 + id: "1" pipeline: "1" body: { date: "2016-04-22T16:32:14.968Z" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml index 3c24d93ad8e58..45ea9a618d305 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml @@ -26,7 +26,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -36,5 +36,5 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.values: ["bar", "baz", "foo"] } diff --git 
a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml index e09ba43506f78..75a15fd415e40 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml @@ -27,14 +27,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "123.42 400 "} - do: get: index: test - id: 1 + id: "1" - match: { _source.val: 123.42 } - match: { _source.status: 400 } - match: { _source.msg: "foo" } @@ -64,14 +64,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: ""} - do: get: index: test - id: 1 + id: "1" - match: { _source.msg: "foo" } --- @@ -99,14 +99,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: ""} - do: get: index: test - id: 1 + id: "1" - match: { _source.msg: "foo" } --- diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml index f60a6946c2928..5ac3967b27b01 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml @@ -29,7 +29,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { foo.bar: "baz" @@ -38,7 +38,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.foo.bar: "baz" } --- "Test escape_dot processor with override and wildcard": @@ -61,7 +61,7 @@ teardown: - do: index: index: test - id: 2 + id: "2" pipeline: "2" body: { foo.bar: "baz", @@ -74,6 +74,6 @@ teardown: - do: get: index: test - id: 2 + id: "2" - match: { _source.foo.bar: "baz" } - match: { _source.foo.qux: "quux" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml index 746858a673531..60208a52aba3e 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml @@ -58,7 +58,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { foo_object: "{\"hello\": \"world\"}", @@ -72,7 +72,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.foo_object.hello: "world" } - match: { _source.foo_array.0: 1 } - match: { _source.foo_string: "bla bla" } @@ -101,7 +101,7 @@ teardown: - do: index: index: test - id: 2 + id: "2" pipeline: "2" body: { json: "{\"dupe\": 1, \"dupe\": 2}", @@ -110,7 +110,7 @@ teardown: - do: get: index: test - id: 2 + id: "2" - match: { _source.dupe: 2 } --- @@ -134,7 +134,7 @@ teardown: - do: index: index: test - id: 3 + id: "3" pipeline: "3" body: { json: "{\"foo\": {\"bar\": \"baz\"} }", @@ -147,6 +147,6 @@ teardown: - do: get: index: test - id: 3 + id: "3" - match: { _source.foo.bar: "baz" } - match: { _source.foo.qux: "quux" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml index 486739e49283c..5dfa8052cd7a4 100644 --- 
a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml @@ -27,7 +27,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { foo: "goodbye=everybody hello=world" @@ -36,7 +36,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.goodbye: "everybody" } - match: { _source.hello: "world" } @@ -78,7 +78,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { origin: "field1", @@ -89,7 +89,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.bar.goodbye: "everybody" } - match: { _source.bar.hello: "world" } - match: { _source.goodbye: "everybody" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml index dc428d989a76f..3ad13d2cee1f2 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml @@ -25,7 +25,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { my_url: "https%3a%2f%2felastic.co%2f" @@ -34,5 +34,5 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.my_url: "https://elastic.co/" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml index b57cbbe3b7fb3..822272526eb56 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml @@ -61,14 +61,14 @@ teardown: catch: conflict index: index: test - id: 1 + id: "1" pipeline: "my_pipeline1" body: {} - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline2" body: {} - match: { _version: 1 } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml index 1deeaa1edf7e3..4db194130c2af 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml @@ -27,13 +27,13 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml index 85ef086ace51e..75f6867083498 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml @@ -27,14 +27,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "1kb"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "1kb" } - match: { _source.target_field: 1024 } 
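The DateProcessor change above adds a break once a format parses the value successfully, so the remaining formats are only attempted when earlier ones throw. A minimal standalone sketch of that control flow, using plain java.time rather than Elasticsearch's parser functions (the class and method names here are illustrative, not from the codebase):

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;
    import java.util.List;

    class FirstMatchDateParser {
        // Candidate formats, tried in order.
        private static final List<DateTimeFormatter> FORMATS = List.of(
            DateTimeFormatter.ofPattern("uuuu-dd-MM"),
            DateTimeFormatter.ofPattern("uuuu-MM-dd")
        );

        static LocalDate parse(String value) {
            RuntimeException lastException = null;
            for (DateTimeFormatter format : FORMATS) {
                try {
                    // Success short-circuits the loop, mirroring the added `break`;
                    // without it, a later format could re-parse and override the result.
                    return LocalDate.parse(value, format);
                } catch (RuntimeException e) {
                    lastException = e; // remember the failure and try the next format
                }
            }
            throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException);
        }

        public static void main(String[] args) {
            // "2010-03-04" already matches uuuu-dd-MM, so uuuu-MM-dd is never consulted,
            // which is what testShortCircuitAdditionalPatternsAfterFirstMatchingPattern asserts.
            System.out.println(parse("2010-03-04")); // prints 2010-04-03
        }
    }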
@@ -60,14 +60,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "FooBar"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "FooBar" } - match: { _source.target_field: "foobar" } @@ -93,14 +93,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "FooBar"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "FooBar" } - match: { _source.target_field: "FOOBAR" } @@ -126,14 +126,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "{\"foo\":\"bar\"}"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "{\"foo\":\"bar\"}" } - match: { _source.target_field.foo: "bar" } @@ -159,14 +159,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "{\"foo\":\"bar\"}"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "{\"foo\":\"bar\"}" } - match: { _source.foo: "bar" } @@ -192,14 +192,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "foo%20bar"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "foo%20bar" } - match: { _source.target_field: "foo bar" } @@ -231,14 +231,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "foo"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "foo" } - match: { _source.target_field1: "1:hTSGlFQnR58UCk+NfKRZzA32dPg=" } - match: { _source.target_field2: "1:LQU9qZlK+B5F3KDmev6m5PMibrg=" } @@ -265,14 +265,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {source_field: "http://www.example.com/index.html"} - do: get: index: test - id: 1 + id: "1" - match: { _source.source_field: "http://www.example.com/index.html" } - match: { _source.target_field.scheme: "http" } - match: { _source.target_field.domain: "www.example.com" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml index cee76d0eaca64..b35fd38f08ae3 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml @@ -37,33 +37,33 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # default pipeline via alias - do: index: index: test_alias - id: 2 + id: "2" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 2 + id: "2" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # default pipeline via upsert - do: update: index: test - id: 3 + id: "3" body: script: source: "ctx._source.ran_script = true" @@ -72,14 +72,14 @@ teardown: - do: get: index: test - id: 3 + id: "3" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # default pipeline via scripted upsert - do: update: index: test - id: 4 + id: "4" body: script: source: "ctx._source.bytes_source_field = '1kb'" @@ -89,21 +89,21 @@ teardown: - do: get: index: test - id: 4 + id: "4" - match: { 
_source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # default pipeline via doc_as_upsert - do: update: index: test - id: 5 + id: "5" body: doc: { "bytes_source_field":"1kb" } doc_as_upsert: true - do: get: index: test - id: 5 + id: "5" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # default pipeline via bulk upsert @@ -169,14 +169,14 @@ teardown: - do: index: index: test - id: 9 + id: "9" pipeline: "_none" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 9 + id: "9" - match: { _source.bytes_source_field: "1kb" } - is_false: _source.bytes_target_field # bad request @@ -184,7 +184,7 @@ teardown: catch: bad_request index: index: test - id: 10 + id: "10" pipeline: "" body: {bytes_source_field: "1kb"} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml index 916a7fe656cc2..b170f282ec7f6 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml @@ -27,14 +27,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {message: "foo bar baz"} - do: get: index: test - id: 1 + id: "1" - match: { _source.message: "foo bar baz" } - match: { _source.a: "foo" } - match: { _source.b: "bar" } @@ -62,7 +62,7 @@ teardown: catch: '/Unable to find match for dissect pattern: \%\{a\},\%\{b\},\%\{c\} against source: foo bar baz/' index: index: test - id: 2 + id: "2" pipeline: "my_pipeline" body: {message: "foo bar baz"} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml index 7b0999e4e2980..8ad2be2b41fe4 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml @@ -28,14 +28,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {bytes_source_field: "1kb", conditional_field: "bar"} - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_source_field: "1kb" } - match: { _source.conditional_field: "bar" } - match: { _source.bytes_target_field: 1024 } @@ -63,14 +63,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {bytes_source_field: "1kb", conditional_field: "bar"} - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_source_field: "1kb" } - match: { _source.conditional_field: "bar" } - is_false: _source.bytes_target_field diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml index bc82b7f1ca7e1..71e2ea3b9ea79 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml @@ -54,14 +54,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "outer" body: {} - do: get: index: test - id: 1 + id: "1" - match: { 
_source.foo: "bar" } - match: { _source.baz: "blub" } @@ -103,7 +103,7 @@ teardown: catch: /illegal_state_exception/ index: index: test - id: 1 + id: "1" pipeline: "outer" body: {} - match: { error.root_cause.0.type: "illegal_state_exception" } @@ -161,7 +161,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "outer" body: > { @@ -171,13 +171,13 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.manager: "john" } - do: index: index: test - id: 2 + id: "2" pipeline: "outer" body: > { @@ -187,14 +187,14 @@ teardown: - do: get: index: test - id: 2 + id: "2" - match: { _source.manager: "jan" } - do: catch: /illegal_state_exception/ index: index: test - id: 3 + id: "3" pipeline: "outer" body: > { @@ -266,7 +266,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "pipeline1" body: > { @@ -275,7 +275,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - length: { _source.pipelines: 3 } - match: { _source.pipelines.0: "pipeline1" } - match: { _source.pipelines.1: "another_pipeline" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index 77a1df81a296a..dcf201666dfeb 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -26,7 +26,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { foo: "bar" @@ -35,7 +35,7 @@ teardown: - do: index: index: test - id: 2 + id: "2" pipeline: "my_pipeline" body: { foo: "blub" @@ -45,13 +45,13 @@ teardown: catch: missing get: index: test - id: 1 + id: "1" - match: { found: false } - do: get: index: test - id: 2 + id: "2" - match: { _source.foo: "blub" } --- @@ -80,7 +80,7 @@ teardown: - do: index: index: test - id: 3 + id: "3" pipeline: "my_pipeline_with_failure" body: { foo: "bar" @@ -90,5 +90,5 @@ teardown: catch: missing get: index: test - id: 3 + id: "3" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml index bb2677f9b193f..fde49c280fae2 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml @@ -39,7 +39,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "retarget" body: { a: true @@ -48,7 +48,7 @@ teardown: - do: get: index: foo - id: 1 + id: "1" - match: { _source.a: true } # only the foo index @@ -98,7 +98,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { a: true } @@ -106,7 +106,7 @@ teardown: - do: get: index: foo - id: 1 + id: "1" - match: { _source.a: true } # only the foo index diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml index dafbe0510c321..db4e0e42de78a 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml @@ -52,33 +52,33 @@ teardown: - do: 
index: index: test - id: 1 + id: "1" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # final pipeline via alias - do: index: index: test_alias - id: 2 + id: "2" body: {bytes_source_field: "1kb"} - do: get: index: test - id: 2 + id: "2" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # final pipeline via upsert - do: update: index: test - id: 3 + id: "3" body: script: source: "ctx._source.ran_script = true" @@ -87,14 +87,14 @@ teardown: - do: get: index: test - id: 3 + id: "3" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # final pipeline via scripted upsert - do: update: index: test - id: 4 + id: "4" body: script: source: "ctx._source.bytes_source_field = '1kb'" @@ -104,21 +104,21 @@ teardown: - do: get: index: test - id: 4 + id: "4" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # final pipeline via doc_as_upsert - do: update: index: test - id: 5 + id: "5" body: doc: { "bytes_source_field":"1kb" } doc_as_upsert: true - do: get: index: test - id: 5 + id: "5" - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } # final pipeline via bulk upsert @@ -258,7 +258,7 @@ teardown: - do: index: index: index_with_final_pipeline_1 - id: 1 + id: "1" pipeline: "change_target_index" body: {foo: "bar"} @@ -267,14 +267,14 @@ teardown: catch: missing get: index: index_with_final_pipeline_1 - id: 1 + id: "1" - match: { found: false } # document present in re-targeted index and re-targeted index's final pipeline was executed - do: get: index: index_with_final_pipeline_2 - id: 1 + id: "1" - match: { _source.foo: "bar" } - match: { _source.final_pipeline_2: true } - is_false: _source.final_pipeline_1 diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml index a38805fb1fec3..ce7ca3866147d 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml @@ -27,7 +27,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -37,7 +37,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.a: "aa" } - match: { _source.b: "bb" } - match: { _source.c: "cc" } @@ -66,7 +66,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -76,7 +76,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.a: "aa" } - match: { _source.b: "b;b" } - match: { _source.c: "cc" } @@ -107,7 +107,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -117,7 +117,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.a: "aa" } - match: { _source.b: "bb" } - match: { _source.c: "cc" } @@ -146,7 +146,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -156,7 +156,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.date: "2018-01-06 16:56:14.295748" } - match: { _source.level: "INFO" } - match: { _source.server: "VirtualServer" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml 
b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml index 7c8c7d1c67750..16e527d409c14 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml @@ -38,7 +38,7 @@ teardown: catch: conflict index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {} - match: { error.root_cause.0.type: "version_conflict_engine_exception" } @@ -47,7 +47,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: {} - match: { _seq_no: 0 } - match: { _primary_term: 1 } @@ -55,7 +55,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {} - match: { _seq_no: 1 } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml index 41fc5b99fb275..61fc876d81809 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml @@ -27,7 +27,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { foo: "hello" @@ -35,7 +35,7 @@ teardown: - do: index: index: test - id: 2 + id: "2" pipeline: "1" body: { foo: "hello", @@ -45,13 +45,13 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.foo: "hello" } - do: get: index: test - id: 2 + id: "2" - match: { _source.foo: "hello" } --- "Test set processor with index change and require_alias": @@ -154,7 +154,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { foo_object: { @@ -170,7 +170,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.copied_foo_object.hello: "world" } - match: { _source.copied_foo_array.0: 1 } - match: { _source.copied_foo_string: "bla bla" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml index a6c663a260aae..5e38f09dbd024 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml @@ -27,7 +27,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "1" body: { message: "test" @@ -36,5 +36,5 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.message: "test" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml index cee302f0e20ed..707a2e2ddb0f7 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml @@ -29,14 +29,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {date_source_field: "12/06/2010"} - do: get: index: test - id: 1 + id: "1" - match: { _source.date_source_field: "12/06/2010" } - match: { _source.date_target_field: "2010-06-12T00:00:00.000+02:00" } @@ -66,14 +66,14 @@ teardown: - do: index: index: test2 - id: 1 + id: "1" pipeline: "my_pipeline_2" body: {date_source_field: 
"2010-06-01T00:00:00.000"} - do: get: index: test2 - id: 1 + id: "1" - match: { _source.date_source_field: "2010-06-01T00:00:00.000" } # date field without a timezone gets timezone from a pipeline - match: { _source.date_target_field: "2010-06-01T00:00:00.000+01:00" } @@ -81,14 +81,14 @@ teardown: - do: index: index: test2 - id: 2 + id: "2" pipeline: "my_pipeline_2" body: {date_source_field: "2010-06-01T00:00:00.000Z"} - do: get: index: test2 - id: 2 + id: "2" - match: { _source.date_source_field: "2010-06-01T00:00:00.000Z" } # date field with a timezone has its time recalculated to a target timezone from a pipeline - match: { _source.date_target_field: "2010-06-01T01:00:00.000+01:00" } @@ -160,14 +160,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { date_source_1: "2018-02-05T13:44:56.657+0100", date_source_2: "2017-04-04 13:43:09 +0200", date_source_3: "10/Aug/2018:09:45:56 +0200", date_source_4: "1", date_source_5: "1", date_source_6: "4000000050d506482dbdf024", date_source_7: "2018-02-05T13:44:56.657+0100" } - do: get: index: test - id: 1 + id: "1" - match: { _source.date_source_1: "2018-02-05T13:44:56.657+0100" } - match: { _source.date_target_1: "2018-02-05T12:44:56.657Z" } - match: { _source.date_source_2: "2017-04-04 13:43:09 +0200" } @@ -236,14 +236,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {date_source_field: "2020-33"} - do: get: index: test - id: 1 + id: "1" - match: { _source.date_source_field: "2020-33" } - match: { _source.date_target_field: "2020-08-10T00:00:00.000Z" } @@ -302,13 +302,13 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {date_source_field: "2020-33"} - do: get: index: test - id: 1 + id: "1" - match: { _source.date_source_field: "2020-33" } - match: { _source.date_target_field: "2020-08-09T00:00:00.000Z" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml index 9de9d19c0b879..fac7a9cdb28e8 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml @@ -89,7 +89,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -108,7 +108,7 @@ teardown: - do: get: index: test - id: 1 + id: "1" - is_false: _source.field_to_rename - is_false: _source.field_to_remove - match: { _source.new_field: ["new_value", "item2", "item3", "item4"] } @@ -148,13 +148,13 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field: "value"} - do: get: index: surprise - id: 1 + id: "1" - length: { _source: 1 } - match: { _source.field: "value" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml index 4d74acdcab39c..eed7b8c76d295 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml @@ -47,14 +47,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "value1"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "value1" } - match: { _source._executed: true } - match: 
{ _source._failed: true } @@ -103,14 +103,14 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "value1"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "value1" } - match: { _source.foofield: "exists" } - match: { _source.foofield2: "ran" } @@ -198,12 +198,12 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {} - do: get: index: test - id: 1 + id: "1" - match: { _source.field: "value" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml index 6b580a09239ec..e2c331deae340 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml @@ -27,7 +27,7 @@ teardown: catch: request index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {} @@ -60,12 +60,12 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {} - do: get: index: test - id: 1 + id: "1" - match: { _source.error_message: "fail_processor_ran" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml index 9142317ce1507..f76d5ad2ade3b 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml @@ -31,7 +31,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -41,14 +41,14 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.values: ["FOO", "BAR", "BAZ"] } #exceeds the recurse max per thread and will run some of these on a different thread - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -59,6 +59,6 @@ teardown: - do: get: index: test - id: 1 + id: "1" - match: { _source.values: ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"] } diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index b265a162f07e7..efe677d7ef223 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -35,7 +35,6 @@ restResources { } def useFixture = providers.environmentVariable("geoip_use_service") - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) == false } .getOrElse(true) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index 4d1c594ab7b7c..5ec08891981f6 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -66,9 +66,15 @@ public class GeoIpDownloader extends AllocatedPersistentTask { Property.Dynamic, Property.NodeScope ); + + // for overriding in tests + private static final String DEFAULT_ENDPOINT = System.getProperty( + "ingest.geoip.downloader.endpoint.default", + "https://geoip.elastic.co/v1/database" + ); public static final Setting<String> ENDPOINT_SETTING = Setting.simpleString( "ingest.geoip.downloader.endpoint", -
"https://geoip.elastic.co/v1/database", + DEFAULT_ENDPOINT, Property.NodeScope ); @@ -258,6 +264,7 @@ void runDownloader() { try { updateDatabases(); } catch (Exception e) { + stats = stats.failedDownload(); logger.error("exception during geoip databases update", e); } try { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 16cb86953003e..9d65b17bacc5e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -12,11 +12,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -29,6 +31,7 @@ import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteTransportException; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -128,14 +131,18 @@ public void clusterChanged(ClusterChangedEvent event) { // wait for state recovered return; } - // bootstrap downloader after first cluster start + + DiscoveryNode masterNode = event.state().nodes().getMasterNode(); + if (masterNode == null || masterNode.getVersion().before(Version.V_7_14_0)) { + // wait for master to be upgraded so it understands geoip task + return; + } + clusterService.removeListener(this); - if (event.localNodeMaster()) { - if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { - startTask(() -> clusterService.addListener(this)); - } else { - stopTask(() -> clusterService.addListener(this)); - } + if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { + startTask(() -> clusterService.addListener(this)); + } else { + stopTask(() -> clusterService.addListener(this)); } } @@ -144,8 +151,9 @@ private void startTask(Runnable onFailure) { GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceAlreadyExistsException == false) { + ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { + Throwable t = e instanceof RemoteTransportException ? 
e.getCause() : e; + if (t instanceof ResourceAlreadyExistsException == false) { logger.error("failed to create geoip downloader task", e); onFailure.run(); } @@ -154,18 +162,23 @@ } private void stopTask(Runnable onFailure) { - ActionListener<PersistentTask<?>> listener = ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { - logger.error("failed to remove geoip downloader task", e); - onFailure.run(); + ActionListener<PersistentTask<?>> listener = ActionListener.wrap( + r -> logger.debug("Stopped geoip downloader task"), + e -> { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { + logger.error("failed to remove geoip downloader task", e); + onFailure.run(); + } } - }); + ); persistentTasksService.sendRemoveRequest( GEOIP_DOWNLOADER, ActionListener.runAfter( listener, () -> client.admin().indices().prepareDelete(DATABASES_INDEX).execute(ActionListener.wrap(rr -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { logger.warn("failed to remove " + DATABASES_INDEX, e); } })) diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml index d0da405cdeea9..b09dac97eba23 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml @@ -19,14 +19,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "89.160.20.128"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "89.160.20.128" } - length: { _source.geoip: 7 } - match: { _source.geoip.city_name: "Linköping" } @@ -60,14 +60,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: ["89.160.20.128", "127.0.0.1"]} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: ["89.160.20.128", "127.0.0.1"] } - length: { _source.geoip: 2 } - length: { _source.geoip.0: 7 } @@ -102,14 +102,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: ["127.0.0.1", "89.160.20.128", "89.160.20.128"]} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: ["127.0.0.1", "89.160.20.128", "89.160.20.128"] } - length: { _source.geoip: 7 } - match: { _source.geoip.city_name: "Linköping" } @@ -147,14 +147,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "89.160.20.128"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "89.160.20.128" } - length: { _source.geoip: 9 } - match: { _source.geoip.city_name: "Linköping" } @@ -190,14 +190,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "89.160.20.128"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "89.160.20.128" } - length: { _source.geoip: 3 } - match: { _source.geoip.country_iso_code: "SE" } @@ -240,28 +240,28 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "80.231.5.0" } - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "80.231.5.0" } - is_false: _source.geoip - do: index: index: test - id: 2 + id: "2" pipeline: "my_pipeline"
body: { field1: "89.160.20.128" } - do: get: index: test - id: 2 + id: "2" - match: { _source.field1: "89.160.20.128" } - length: { _source.geoip: 7 } - match: { _source.geoip.city_name: "Linköping" } @@ -295,14 +295,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "89.160.20.128"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "89.160.20.128" } - length: { _source.geoip: 4 } - match: { _source.geoip.ip: "89.160.20.128" } diff --git a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml index 8d938eb957222..3293f571a35d0 100644 --- a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml +++ b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml @@ -19,14 +19,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } - match: { _source.user_agent.name: "Chrome" } - match: { _source.user_agent.original: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } @@ -56,14 +56,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } - match: { _source.field2.os.full: "Mac OS X 10.9.2" } - is_false: _source.user_agent diff --git a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml index 3d0179d6ad51a..85b9e24db4bd7 100644 --- a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml +++ b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml @@ -20,14 +20,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } - match: { _source.user_agent.name: "Test" } - is_false: _source.user_agent.os diff --git a/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml b/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml index 33bad01a1232a..e850d7273eea6 100644 --- a/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml +++ 
b/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml @@ -11,7 +11,7 @@ setup: - do: index: index: test123 - id: 1 + id: "1" body: { age: 23 } - do: diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml index 946b63a65d923..6fc117ed2189a 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml @@ -109,12 +109,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "value1_foo" } - do: index: index: test - id: 2 + id: "2" body: { "text": "value2_foo value3_foo" } - do: indices.refresh: {} diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml index a9d3c2da68617..55cd3710219ef 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml @@ -4,12 +4,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "value1" } - do: index: index: test - id: 2 + id: "2" body: { "text": "value2" } - do: indices.refresh: {} @@ -68,25 +68,25 @@ - do: index: index: test - id: 1 + id: "1" body: { "theField": "foo" } - do: index: index: test - id: 2 + id: "2" body: { "theField": "foo 2" } - do: index: index: test - id: 3 + id: "3" body: { "theField": "foo 3" } - do: index: index: test - id: 4 + id: "4" body: { "theField": "foo 4" } # we use a different index here since we compare the explain description which contains a doc ID and we can only be sure that it's 0 @@ -94,7 +94,7 @@ - do: index: index: otherindex - id: 5 + id: "5" body: { "otherField": "foo" } - do: indices.refresh: {} @@ -144,7 +144,7 @@ - do: index: index: test - id: 1 + id: "1" body: {} refresh: true diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml index e92e10b9ad276..b07131c809d55 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml @@ -4,25 +4,25 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: index_1 - id: 2 + id: "2" body: { foo: baz } - do: index: index: index_1 - id: 3 + id: "3" body: { foo: foo } - do: index: index: index_2 - id: 1 + id: "1" body: { foo: foo } - do: diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml index 0ffb7640c1976..9bd58d554d0c3 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml @@ -1,8 +1,6 @@ setup: - skip: - 
version: all - reason: 'AwaitsFix: https://github.com/elastic/elasticsearch/issues/82502' - features: allowed_warnings + features: allowed_warnings_regex - do: indices.put_template: name: index_template @@ -22,8 +20,8 @@ setup: type: keyword - do: - allowed_warnings: - - "index [test-0] matches multiple legacy templates [global, index_template], composable templates will only match a single template" + allowed_warnings_regex: + - "index \\[test-[0-2]\\] matches multiple legacy templates \\[global, index_template\\], composable templates will only match a single template" bulk: refresh: true body: diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index ae3a9ee4ca5d7..b5c7bbf484a58 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -181,7 +181,7 @@ tasks.register("generateContextApiSpec", DefaultTestClustersTask) { classpath = sourceSets.doc.runtimeClasspath systemProperty "cluster.uri", "${-> generateContextApiSpecCluster.get().singleNode().getAllHttpSocketURI().get(0)}" systemProperty "jdksrc", providers.systemProperty("jdksrc").getOrNull() - systemProperty "packageSources", providers.systemProperty("packageSources").forUseAtConfigurationTime().getOrNull() + systemProperty "packageSources", providers.systemProperty("packageSources").getOrNull() }.assertNormalExitValue() } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java deleted file mode 100644 index eabef10ed5374..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.painless; - -import java.util.function.Consumer; - -/** - * A constant initializer to be added to the class file. - */ -public class Constant { - public final Location location; - public final String name; - public final org.objectweb.asm.Type type; - public final Consumer<MethodWriter> initializer; - - /** - * Create a new constant. - * - * @param location the location in the script that is creating it - * @param type the type of the constant - * @param name the name of the constant - * @param initializer code to initialize the constant. It will be called when generating the clinit method and is expected to leave the - * value of the constant on the stack. Generating the load instruction is managed by the caller. - */ - public Constant(Location location, org.objectweb.asm.Type type, String name, Consumer<MethodWriter> initializer) { - this.location = location; - this.name = name; - this.type = type; - this.initializer = initializer; - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java deleted file mode 100644 index 043940011b55e..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License - 2.0 and the Server Side Public License, v 1; you may not use this file except - in compliance with, at your election, the Elastic License 2.0 or the Server - Side Public License, v 1. - */ - -package org.elasticsearch.painless; - -import java.util.BitSet; -import java.util.HashMap; -import java.util.Map; - -/** - * Program-wide globals (initializers, synthetic methods, etc) - */ -public class Globals { - private final Map<String, Constant> constantInitializers = new HashMap<>(); - private final BitSet statements; - - /** Create a new Globals from the set of statement boundaries */ - public Globals(BitSet statements) { - this.statements = statements; - } - - /** Adds a new constant initializer to be written */ - public void addConstantInitializer(Constant constant) { - if (constantInitializers.put(constant.name, constant) != null) { - throw new IllegalStateException("constant initializer: " + constant.name + " already exists"); - } - } - - /** Returns the current initializers */ - public Map<String, Constant> getConstantInitializers() { - return constantInitializers; - } - - /** Returns the set of statement boundaries */ - public BitSet getStatements() { - return statements; - } -} diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml index 000e1af694d7d..612f56dd2617e 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml @@ -21,7 +21,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: str: "abc" double: 1.0 @@ -30,7 +30,7 @@ setup: - do: index: index: test_1 - id: 2 + id: "2" body: str: "abc" double: 1.0 @@ -39,7 +39,7 @@ setup: - do: index: index: test_1 - id: 3 + id: "3" body: str: "bcd" double: 2.0 diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml index 355da5cd27e9e..1c934b8421716 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml @@ -17,21 +17,21 @@ setup: - do: index: index: test - id: 1 + id: "1" body: double: 1.0 - do: index: index: test - id: 2 + id: "2" body: double: 1.0 - do: index: index: test - id: 3 + id: "3" body: double: 2.0 diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index cb118ed9d562f..a23a27a2e6578 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -4,7 +4,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: bar count: 1 @@ -12,7 +12,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: lang: painless @@ -26,7 +26,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: xxx } - match: { _source.count: 1 } @@ -34,7 +34,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: lang: painless @@ -47,7 +47,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: yyy } - match: {
_source.count: 1 } @@ -55,7 +55,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: lang: painless @@ -68,7 +68,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: yyy } - match: { _source.count: 1 } @@ -78,7 +78,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: lang: painless @@ -91,7 +91,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: yyy } - match: { _source.foo_length: 3 } @@ -104,7 +104,7 @@ - do: index: index: test_1 - id: 2 + id: "2" body: foo: bar count: 1 @@ -113,7 +113,7 @@ catch: bad_request update: index: test_1 - id: 2 + id: "2" body: script: lang: painless diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml index c4ae4fbf38cba..d0ddb9f32d565 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml @@ -22,7 +22,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "foo": "aaa", "date": "2017/01/01", diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml index bb6362389a3da..559a54d28a19e 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml @@ -4,7 +4,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: source: "ctx._source.foo = params.bar" @@ -15,7 +15,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } @@ -23,7 +23,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: script: source: "ctx._source.foo = params.bar" @@ -34,14 +34,14 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: xxx } - do: update: index: test_1 - id: 2 + id: "2" body: script: source: "ctx._source.foo = params.bar" @@ -53,14 +53,14 @@ - do: get: index: test_1 - id: 2 + id: "2" - match: { _source.foo: xxx } - do: update: index: test_1 - id: 3 + id: "3" body: script: source: "ctx._source.has_now = ctx._now > 0" @@ -71,14 +71,14 @@ - do: get: index: test_1 - id: 3 + id: "3" - match: { _source.has_now: true } - do: update: index: test_1 - id: 4 + id: "4" body: script: # assume _now is an absolute clock if it's in the range [now - 1m, now]; this tolerance might need adjustment after CI cycles @@ -90,6 +90,6 @@ - do: get: index: test_1 - id: 4 + id: "4" - match: { _source.within_one_minute: true } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml index a006fde630716..5674d79b52a94 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml @@ -4,17 +4,17 @@ - do: index: index: test - id: 1 + id: "1" body: { "test": "value beck", "num1": 1.0, "bool": true } - do: index: index: test - id: 2 + id: "2" body: { "test": "value beck", "num1": 2.0, "bool": false } - do: index: index: test - id: 3 + id: "3" body: { "test": "value beck", "num1": 3.0, "bool": true } - do: 
indices.refresh: {} @@ -115,12 +115,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "test": "value beck", "num1": 1.0 } - do: index: index: test - id: 2 + id: "2" body: { "test": "value beck", "num1": 2.0 } - do: indices.refresh: {} @@ -272,7 +272,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "dummy_field": 1 } - do: indices.refresh: {} @@ -322,7 +322,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "dummy_field": 1 } - do: indices.refresh: {} @@ -361,7 +361,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "f": 42 } - do: indices.refresh: {} @@ -388,7 +388,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "dummy_field": 1 } - do: indices.refresh: {} @@ -424,7 +424,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "genre": 1 } - do: @@ -455,7 +455,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "test": "value beck", "num1": 1.0 } - do: indices.refresh: {} diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml index c2880d17361c3..d3b43575b58a4 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml @@ -48,7 +48,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: rank: 1 boolean: true @@ -70,14 +70,14 @@ setup: - do: index: index: test - id: 2 + id: "2" body: rank: 2 - do: index: index: test - id: 3 + id: "3" body: rank: 3 boolean: [true, false, true] @@ -105,7 +105,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -116,7 +116,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -127,7 +127,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -138,7 +138,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -149,7 +149,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -160,7 +160,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -171,7 +171,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -182,7 +182,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -193,7 +193,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -204,7 +204,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -221,7 +221,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -232,7 +232,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: 
{ term: { _id: "1" } } script_fields: field: script: @@ -243,7 +243,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -254,7 +254,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -353,7 +353,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -364,7 +364,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -375,7 +375,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -386,7 +386,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -397,7 +397,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -408,7 +408,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -419,7 +419,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -430,7 +430,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -441,7 +441,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -452,7 +452,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -463,7 +463,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -474,7 +474,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -485,7 +485,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -496,7 +496,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -507,7 +507,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -520,7 +520,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -532,7 +532,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -544,7 +544,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -556,7 +556,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -568,7 +568,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } 
+ query: { term: { _id: "3" } } script_fields: field: script: @@ -580,7 +580,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -592,7 +592,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 3 } } + query: { term: { _id: "3" } } script_fields: field: script: @@ -604,7 +604,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 2 } } + query: { term: { _id: "2" } } script_fields: field: script: @@ -616,7 +616,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: centroid: script: @@ -628,7 +628,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: bbox: script: @@ -642,7 +642,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: topLeft: script: @@ -659,7 +659,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: type: script: @@ -670,7 +670,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: width: script: @@ -687,7 +687,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -698,7 +698,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -780,7 +780,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -791,7 +791,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -860,7 +860,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -871,7 +871,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -884,7 +884,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -895,7 +895,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -958,7 +958,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -969,7 +969,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1069,7 +1069,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1080,7 +1080,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1223,7 +1223,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1234,7 +1234,7 @@ setup: search: rest_total_hits_as_int: true body: - query: 
{ term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1312,7 +1312,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1323,7 +1323,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1404,7 +1404,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1415,7 +1415,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1493,7 +1493,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1543,7 +1543,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1556,7 +1556,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1567,7 +1567,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1622,7 +1622,7 @@ setup: - do: index: index: versiontest - id: 3000 + id: "3000" version: 50 version_type: external body: @@ -1636,7 +1636,7 @@ setup: index: versiontest rest_total_hits_as_int: true body: - query: { term: { _id: 3000 } } + query: { term: { _id: "3000" } } script_fields: ver: script: @@ -1650,7 +1650,7 @@ setup: - do: index: index: versiontest - id: 3000 + id: "3000" version: 60 version_type: external body: @@ -1662,7 +1662,7 @@ setup: catch: conflict index: index: versiontest - id: 3000 + id: "3000" version: 55 version_type: external body: @@ -1675,7 +1675,7 @@ setup: index: versiontest rest_total_hits_as_int: true body: - query: { term: { _id: 3000 } } + query: { term: { _id: "3000" } } script_fields: ver: script: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml index 614ce53fd0649..67457e64c874e 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml @@ -18,7 +18,7 @@ Content-Type: application/json index: index: test - id: 1 + id: "1" body: binary: "U29tZSBiaW5hcnkgYmxvYg==" @@ -28,7 +28,7 @@ Content-Type: application/json index: index: test - id: 2 + id: "2" body: binary: [ "U29tZSBiaW5hcnkgYmxvYg==", @@ -42,7 +42,7 @@ Content-Type: application/json index: index: test - id: 3 + id: "3" body: {} - do: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml index 57e7b4e31e057..d699b58607da3 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -18,32 +18,32 @@ setup: body: - index: _index: test - _id: 1 + _id: "1" - date: 
"2017-01-01T00:00:00" value_field: 1 - index: _index: test - _id: 2 + _id: "2" - date: "2017-01-02T00:00:00" value_field: 2 - index: _index: test - _id: 3 + _id: "3" - date: "2017-01-03T00:00:00" value_field: 3 - index: _index: test - _id: 4 + _id: "4" - date: "2017-01-04T00:00:00" value_field: 4 - index: _index: test - _id: 5 + _id: "5" - date: "2017-01-05T00:00:00" value_field: 5 - index: _index: test - _id: 6 + _id: "6" - date: "2017-01-06T00:00:00" value_field: 6 diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml index 1af3cb3c622b6..1cf44a6b27d1c 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml @@ -84,12 +84,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text-location": "location1", "location" : {"lat" : 40.24, "lon" : -70.24} } - do: index: index: test - id: 2 + id: "2" body: { "text-location": "location2", "location" : {"lat" : 40.12, "lon" : -70.12} } - do: indices.refresh: {} @@ -167,12 +167,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "date": "2018-01-01T02:00:00Z"} - do: index: index: test - id: 2 + id: "2" body: { "date": "2018-01-01T01:00:00Z" } - do: indices.refresh: {} @@ -257,14 +257,14 @@ - do: index: index: test - id: 1 + id: "1" body: { "ival" : 40, "lval" : 40, "fval": 40.0, "dval": 40.0} # for this document, the smallest value in the array is chosen, which will be the closest to the origin - do: index: index: test - id: 2 + id: "2" body: { "ival" : [50, 40, 20], "lval" : [50, 40, 20], "fval" : [50.0, 40.0, 20.0], "dval" : [50.0, 40.0, 20.0] } - do: indices.refresh: {} @@ -565,7 +565,7 @@ - do: index: index: test - id: 1 + id: "1" body: object: field: "some text" diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java index 314d63d2792e5..b71094f51cc20 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java @@ -721,11 +721,10 @@ private static Edge[] ring( minX = Math.min(minX, points[i].x); maxX = Math.max(maxX, points[i].x); } - if (signedArea == 0) { - // Points are collinear or self-intersection - throw new InvalidShapeException("Cannot determine orientation: signed area equal to 0"); - } - boolean orientation = signedArea < 0; + + // if the polygon is tiny, the computed area can result in zero. In that case + // we assume orientation is correct + boolean orientation = signedArea == 0 ? 
handedness != false : signedArea < 0; // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java index d70dc7981296b..0603533a8503f 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java @@ -225,7 +225,7 @@ public void testPolygonSelfIntersection() { new CoordinatesBuilder().coordinate(-40.0, 50.0).coordinate(40.0, 50.0).coordinate(-40.0, -50.0).coordinate(40.0, -50.0).close() ); Exception e = expectThrows(InvalidShapeException.class, () -> newPolygon.buildS4J()); - assertThat(e.getMessage(), containsString("Cannot determine orientation: signed area equal to 0")); + assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0, 0.0, NaN)")); } /** note: only supported by S4J at the moment */ diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java index 35b98df7a397d..ba82ad37425be 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java @@ -167,7 +167,7 @@ public void testPolygonWithUndefinedOrientationDueToCollinearPoints() { new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0).coordinate(-1.0, -1.0).close() ); InvalidShapeException e = expectThrows(InvalidShapeException.class, pb::buildS4J); - assertEquals("Cannot determine orientation: signed area equal to 0", e.getMessage()); + assertEquals("Self-intersection at or near point (-1.0, -1.0, NaN)", e.getMessage()); } public void testCrossingDateline() { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java index 2b3ec09abdfd7..25f0786534110 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java @@ -32,6 +32,7 @@ * Query to run on a [rank_feature] field. */ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder<RankFeatureQueryBuilder> { + private static final ScoreFunction DEFAULT_SCORE_FUNCTION = new ScoreFunction.Saturation(); /** * Scoring function for a [rank_feature] field.
@@ -309,7 +310,7 @@ private static ScoreFunction readScoreFunction(StreamInput in) throws IOExceptio if (numNonNulls > 1) { throw new IllegalArgumentException("Can only specify one of [log], [saturation], [sigmoid] and [linear]"); } else if (numNonNulls == 0) { - query = new RankFeatureQueryBuilder(field, new ScoreFunction.Saturation()); + query = new RankFeatureQueryBuilder(field, DEFAULT_SCORE_FUNCTION); } else { ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length).filter(Objects::nonNull).findAny().get(); query = new RankFeatureQueryBuilder(field, scoreFunction); @@ -368,8 +369,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); builder.field("field", field); - scoreFunction.doXContent(builder); - printBoostAndQueryName(builder); + if (false == scoreFunction.equals(DEFAULT_SCORE_FUNCTION)) { + scoreFunction.doXContent(builder); + } + boostAndQueryNameToXContent(builder); builder.endObject(); } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java index c4e7e35c646a1..4bd2d0714e93d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java @@ -147,4 +147,21 @@ public void testIllegalCombination() { e.getMessage() ); } + + public void testParseDefaultsRemoved() throws IOException { + String json = """ + { + "rank_feature" : { + "field": "foo", + "boost": 1, + "saturation": {} + } + }"""; + checkGeneratedJson(""" + { + "rank_feature": { + "field": "foo" + } + }""", parseQuery(json)); + } } diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml index d58ef74ea6316..40342b52aeea2 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml @@ -16,25 +16,25 @@ setup: - do: index: index: test - id: 1 + id: "1" body: {} - do: index: index: test - id: 2 + id: "2" body: { "foo": "Apache Lucene powers Elasticsearch" } - do: index: index: test - id: 3 + id: "3" body: { "foo": "Elasticsearch is based on Apache Lucene" } - do: index: index: test - id: 4 + id: "4" body: { "foo": "The Apache Software Foundation manages many projects including Lucene" } - do: diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml index 4fce6c84e817d..fcdf3f5a5fdf2 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml @@ -16,7 +16,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: pagerank: 10 url_length: 50 @@ -24,7 +24,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: pagerank: 100 url_length: 20 diff --git 
a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml index 7ba892cc87183..4302e33e1cf54 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: tags: foo: 3 @@ -29,7 +29,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: tags: bar: 6 diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml index f68385d25114f..b7f810fa48206 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml @@ -14,25 +14,25 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "number" : 1 } - do: index: index: test - id: 2 + id: "2" body: { "number" : 1.53 } - do: index: index: test - id: 3 + id: "3" body: { "number" : -2.1 } - do: index: index: test - id: 4 + id: "4" body: { "number" : 1.53 } - do: diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml index 21843dad1d177..15e44e5e7b73a 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml @@ -19,7 +19,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: a_field: "quick brown fox jump lazy dog" @@ -27,7 +27,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: a_field: "xylophone xylophone xylophone" @@ -39,7 +39,7 @@ setup: - do: get: index: test - id: 1 + id: "1" - is_true: found - match: { _source.a_field: "quick brown fox jump lazy dog" } diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml index 58441abac8f88..f9be93ca8c110 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml @@ -22,7 +22,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: a_field: "quick brown fox jump lazy dog" text_field: "quick brown fox jump lazy dog" diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index 306931fbf8b78..0ec4c0c79c3e0 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -60,6 +60,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, 
ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); def versions = ['2', '1', '090'] @@ -124,7 +123,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { } versions.each { version -> def oldEsDependency = configurations['es' + version] - oldEsDependency.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + oldEsDependency.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); TaskProvider fixture = tasks.register("oldEs${version}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, oldEsDependency executable = "${BuildParams.runtimeJavaHome}/bin/java" diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 14a7f86b8fdb0..958ebf14d8a23 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -37,6 +39,7 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.upgrades.FeatureMigrationResults; +import org.elasticsearch.upgrades.SingleFeatureMigrationResult; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -50,6 +53,8 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Function; @@ -267,6 +272,67 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } + public void testMigrationWillRunAfterError() throws Exception { + createSystemIndexForDescriptor(INTERNAL_MANAGED); + + TestPlugin.preMigrationHook.set((state) -> Collections.emptyMap()); + TestPlugin.postMigrationHook.set((state, metadata) -> {}); + + ensureGreen(); + + SetOnce failure = new SetOnce<>(); + CountDownLatch clusterStateUpdated = new CountDownLatch(1); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class) + .submitStateUpdateTask(this.getTestName(), new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + FeatureMigrationResults newResults = new FeatureMigrationResults( + Collections.singletonMap( + FEATURE_NAME, + SingleFeatureMigrationResult.failure(INTERNAL_MANAGED_INDEX_NAME, new RuntimeException("it failed :(")) + ) + ); + Metadata newMetadata = Metadata.builder(currentState.metadata()) + .putCustom(FeatureMigrationResults.TYPE, newResults) + .build(); + return 
ClusterState.builder(currentState).metadata(newMetadata).build(); + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + clusterStateUpdated.countDown(); + } + + @Override + public void onFailure(Exception e) { + failure.set(e); + clusterStateUpdated.countDown(); + } + }, ClusterStateTaskExecutor.unbatched()); + + clusterStateUpdated.await(10, TimeUnit.SECONDS); // Should be basically instantaneous + if (failure.get() != null) { + logger.error("cluster state update to inject migration failure state did not succeed", failure.get()); + fail("cluster state update failed, see log for details"); + } + + PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(); + PostFeatureUpgradeResponse migrationResponse = client().execute(PostFeatureUpgradeAction.INSTANCE, migrationRequest).get(); + // Make sure we actually started the migration + assertTrue( + "could not find [" + FEATURE_NAME + "] in response: " + Strings.toString(migrationResponse), + migrationResponse.getFeatures().stream().anyMatch(feature -> feature.getFeatureName().equals(FEATURE_NAME)) + ); + + // Now wait for the migration to finish (otherwise the test infra explodes) + assertBusy(() -> { + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(); + GetFeatureUpgradeStatusResponse statusResp = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }); + } + public void assertIndexHasCorrectProperties( Metadata metadata, String indexName, @@ -344,6 +410,7 @@ public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) thr static final String FEATURE_NAME = "A-test-feature"; // Sorts alphabetically before the feature from MultiFeatureMigrationIT static final String ORIGIN = FeatureMigrationIT.class.getSimpleName(); static final String FlAG_SETTING_KEY = IndexMetadata.INDEX_PRIORITY_SETTING.getKey(); + static final String INTERNAL_MANAGED_INDEX_NAME = ".int-man-old"; static final int INDEX_DOC_COUNT = 100; // arbitrarily chosen public static final Version NEEDS_UPGRADE_VERSION = Version.V_7_0_0; @@ -354,7 +421,7 @@ public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) thr static final SystemIndexDescriptor INTERNAL_MANAGED = SystemIndexDescriptor.builder() .setIndexPattern(".int-man-*") .setAliasName(".internal-managed-alias") - .setPrimaryIndex(".int-man-old") + .setPrimaryIndex(INTERNAL_MANAGED_INDEX_NAME) .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) .setSettings(createSimpleSettings(NEEDS_UPGRADE_VERSION, INTERNAL_MANAGED_FLAG_VALUE)) .setMappings(createSimpleMapping(true, true)) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 2fc6358f5c468..f0838dd571637 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -37,6 +37,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import 
java.util.function.BiConsumer; import java.util.function.Function; @@ -91,6 +93,8 @@ public void testMultipleFeatureMigration() throws Exception { ensureGreen(); + CountDownLatch hooksCalled = new CountDownLatch(4); + SetOnce preMigrationHookCalled = new SetOnce<>(); SetOnce postMigrationHookCalled = new SetOnce<>(); SetOnce secondPluginPreMigrationHookCalled = new SetOnce<>(); @@ -109,6 +113,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); preMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -125,6 +130,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); postMigrationHookCalled.set(true); + hooksCalled.countDown(); }); SecondPlugin.preMigrationHook.set(clusterState -> { @@ -145,6 +151,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPreMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -165,6 +172,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPostMigrationHookCalled.set(true); + hooksCalled.countDown(); }); PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(); @@ -177,6 +185,9 @@ public void testMultipleFeatureMigration() throws Exception { .collect(Collectors.toSet()); assertThat(migratingFeatures, hasItems(FEATURE_NAME, SECOND_FEATURE_NAME)); + // wait for all the plugin methods to have been called before assertBusy since that will exponentially backoff + assertThat(hooksCalled.await(30, TimeUnit.SECONDS), is(true)); + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(); assertBusy(() -> { GetFeatureUpgradeStatusResponse statusResponse = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index bab93e56b653f..beac9ab88c78c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -593,7 +593,7 @@ protected void finishHim(Exception failure) { */ protected void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { logger.debug("[{}]: finishing without any catastrophic failures", task.getId()); - scrollSource.close(() -> { + scrollSource.close(threadPool.getThreadContext().preserveContext(() -> { if (failure == null) { BulkByScrollResponse response = buildResponse( timeValueNanos(System.nanoTime() - startTime.get()), @@ -605,7 +605,7 @@ protected void finishHim(Exception failure, List indexingFailures, List } else { listener.onFailure(failure); } - }); + })); } /** diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml index ab049d5bbc870..90c09c4a23741 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml @@ -3,7 +3,7 
@@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -41,7 +41,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -101,7 +101,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -109,7 +109,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -153,7 +153,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -161,7 +161,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -204,7 +204,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -212,7 +212,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -245,12 +245,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "junk" } - do: indices.refresh: {} @@ -284,12 +284,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -327,12 +327,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -370,12 +370,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml index cd7a4e8bce12f..d07b61e32b9e9 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml @@ -19,7 +19,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /conflicts may only be .* but was \[cat\]/ @@ -39,7 +39,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -59,7 +59,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -76,7 +76,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml index 8832b6a65c3dd..cb97e957e8394 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml @@ -7,7 +7,7 @@ - do: index: index: index1 - id: 1 + id: "1" version: 0 # Starting version is zero version_type: external body: {"delete": 0} @@ -27,5 +27,5 @@ - do: get: index: index1 - id: 1 + id: "1" - match: {_version: 0} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml 
b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml index ea8ed4df3e748..0d4b4bc30673b 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml @@ -9,7 +9,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml index efc669173328c..08eb80d789155 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml @@ -3,22 +3,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -67,22 +67,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -177,32 +177,32 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: index: index: test - id: 5 + id: "5" body: { "text": "test" } - do: index: index: test - id: 6 + id: "6" body: { "text": "test" } - do: indices.refresh: @@ -298,22 +298,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml index 9ef6c1a90c400..4053b0bb9d6b1 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml @@ -3,7 +3,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -37,12 +37,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -76,7 +76,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,12 +132,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -177,12 +177,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -216,7 +216,7 @@ - do: index: index: 
source - id: 1 + id: "1" body: {} - do: indices.refresh: {} @@ -233,7 +233,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source: {} } # Make sure reindex closed all the scroll contexts @@ -248,7 +248,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "filtered": "removed" } refresh: true @@ -276,7 +276,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source.text: "test" } - is_false: _source.filtered diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index 4a1b9f9112e96..762e63fb329b4 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -44,7 +44,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: reindex: @@ -103,7 +103,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: @@ -125,7 +125,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -146,7 +146,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -168,7 +168,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -189,7 +189,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ @@ -262,7 +262,7 @@ - do: index: index: test - id: 1 + id: "1" body: { age: 23 } - do: indices.refresh: {} @@ -388,7 +388,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: catch: /_source:false is not supported in this context/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml index b64da0831621d..844873f95b0f2 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml @@ -10,7 +10,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: cluster.put_settings: @@ -36,7 +36,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /no such index \[dest\] and \[action.auto_create_index\] \(\[test\]\) doesn't match/ @@ -57,7 +57,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /no such index \[dest\] and \[action.auto_create_index\] contains \[-dest\] which forbids automatic creation of the index/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml index d021848216517..4aac33d52fb6f 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml @@ -3,12 +3,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "junk" } - do: indices.refresh: {} @@ -40,12 +40,12 @@ - do: index: index: 
test - id: 1 + id: "1" body: { "order": 1 } - do: index: index: test - id: 2 + id: "2" body: { "order": 2 } - do: indices.refresh: {} @@ -84,12 +84,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "order": 1 } - do: index: index: test - id: 2 + id: "2" body: { "order": 2 } - do: indices.refresh: {} @@ -131,7 +131,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "order": 1 } - do: indices.refresh: {} @@ -165,12 +165,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "value": 17 } - do: index: index: test - id: 2 + id: "2" body: { "value": 17 } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml index 44b36119fbc76..fb9fb0302de6c 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml @@ -10,7 +10,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml index 3d718831187b4..bfd722d2a8d96 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml @@ -7,19 +7,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -51,19 +51,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -97,19 +97,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -142,17 +142,17 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml index d7a0db5451a1d..feea88932f545 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml @@ -3,7 +3,7 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } - do: indices.refresh: {} @@ -21,7 +21,7 @@ - do: get: index: dest - id: 1 + id: "1" routing: cat - match: { _routing: cat } @@ -30,7 +30,7 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } routing: null - do: @@ -49,5 +49,5 @@ - do: get: index: dest - id: 1 + id: "1" - is_false: _routing diff --git 
a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml index 3498e555d2879..58253459cc03a 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml @@ -9,7 +9,7 @@ - do: index: index: src - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -42,4 +42,4 @@ - do: get: index: dest - id: 1 + id: "1" diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml index 696fdd068c454..67c917f629db6 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml @@ -12,17 +12,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -58,17 +58,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -104,17 +104,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -156,17 +156,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml index d5beb06449a97..8441dfdc3b02a 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml @@ -3,22 +3,22 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -63,22 +63,22 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -182,32 +182,32 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: index: index: source - id: 5 + id: "5" body: { "text": "test" } - do: index: index: source - id: 6 + id: "6" body: { "text": 
"test" } - do: indices.refresh: {} @@ -306,22 +306,22 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml index 8716ad303c57f..c0bfa464a0c04 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml @@ -3,7 +3,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -37,12 +37,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "blort" } - do: indices.refresh: {} @@ -86,12 +86,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -113,14 +113,14 @@ - do: get: index: new_twitter - id: 1 + id: "1" routing: kimchy - match: { _routing: kimchy } - do: get: index: new_twitter - id: 2 + id: "2" routing: foo - match: { _routing: foo } @@ -129,12 +129,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -183,12 +183,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -216,14 +216,14 @@ - do: index: index: twitter - id: 1 + id: "1" version: 1 version_type: external body: { "user": "kimchy" } - do: index: index: new_twitter - id: 1 + id: "1" version: 1 version_type: external body: { "user": "kimchy" } @@ -260,12 +260,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: new_twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -299,12 +299,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -349,17 +349,17 @@ - do: index: index: index1 - id: 1 + id: "1" body: { "lang": "en", "id": 123 } - do: index: index: index1 - id: 2 + id: "2" body: { "lang": "en", "id": 456 } - do: index: index: index1 - id: 3 + id: "3" body: { "lang": "fr", "id": 789 } # Destination index - do: @@ -422,7 +422,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml index 8354fc0aaf322..325f044466847 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml @@ -3,7 +3,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -58,12 +58,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": 
"test2" } - do: indices.refresh: {} @@ -113,7 +113,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } routing: foo refresh: true @@ -165,7 +165,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -226,13 +226,13 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } refresh: true @@ -288,7 +288,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -319,7 +319,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -340,7 +340,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "filtered": "removed" } refresh: true @@ -379,7 +379,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source.text: "test" } - is_false: _source.filtered @@ -397,17 +397,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml index e4861d35e781a..07aa512a1f4cc 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml @@ -18,20 +18,20 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "join_field": { "name": "parent" } } - do: index: index: source - id: 2 + id: "2" routing: "1" body: { "join_field": { "name": "child", "parent": "1" } } - do: index: index: source - id: 3 + id: "3" routing: "1" body: { "join_field": { "name": "grand_child", "parent": "2" } } @@ -59,7 +59,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "2"} @@ -74,7 +74,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "3"} @@ -117,7 +117,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "2"} @@ -132,7 +132,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "3"} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml index 02c4afd731531..a2334139c20e9 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -29,7 +29,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -85,7 +85,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -93,7 +93,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -124,7 +124,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,7 +132,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -162,7 
+162,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -170,7 +170,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -190,12 +190,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "junk" } - do: indices.refresh: {} @@ -222,12 +222,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -252,12 +252,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -282,12 +282,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -341,7 +341,7 @@ - do: index: index: test - id: 1 + id: "1" body: {} - do: indices.refresh: {} @@ -354,7 +354,7 @@ - do: get: index: test - id: 1 + id: "1" - match: { _source: {} } - match: { _version: 2 } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml index 46d1db0d74df4..df3d7d5ef5da4 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /conflicts may only be .* but was \[cat\]/ @@ -20,7 +20,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -37,7 +37,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -55,7 +55,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ @@ -76,7 +76,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -90,7 +90,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ @@ -110,7 +110,7 @@ - do: index: index: test - id: 1 + id: "1" body: { age: 23 } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml index ba14b34cf0ef5..d5c3404b482e8 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml @@ -11,7 +11,7 @@ - do: index: index: test - id: 1 + id: "1" refresh: true body: { "name": "bob! 
house" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml index 5a22eec88c0f7..1a28ec8b183b5 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml @@ -10,7 +10,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml index 3aa6c0918800d..477e83452adf3 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -17,7 +17,7 @@ - do: get: index: test - id: 1 + id: "1" - match: {_version: 2} --- @@ -28,7 +28,7 @@ - do: index: index: index1 - id: 1 + id: "1" version: 0 # Starting version is zero version_type: external body: {"update": 0} @@ -45,5 +45,5 @@ - do: get: index: index1 - id: 1 + id: "1" - match: {_version: 0} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml index 4a067580b54d3..ef71dd36a18b8 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml @@ -9,7 +9,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -34,4 +34,4 @@ - do: get: index: test - id: 1 + id: "1" diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml index 29330f5b32f58..a6e4005e2db59 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml @@ -3,22 +3,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -59,22 +59,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -164,32 +164,32 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: index: index: test - id: 5 + id: "5" body: { "text": "test" } - do: index: index: test - id: 
6 + id: "6" body: { "text": "test" } - do: indices.refresh: {} @@ -280,22 +280,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml index 0c297b13dbd81..d73bdcc6b4de5 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml @@ -3,7 +3,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -34,7 +34,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -62,12 +62,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -108,12 +108,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -135,7 +135,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -154,7 +154,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -173,22 +173,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: "3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -235,22 +235,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: "3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -310,12 +310,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -337,22 +337,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: "3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -417,7 +417,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 8da8646fdcc83..e0c57ea89ff8e 
100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -82,7 +82,9 @@ dependencies { api "io.projectreactor:reactor-core:${versions.reactorCore}" api "org.reactivestreams:reactive-streams:${versions.reactiveStreams}" - api "org.slf4j:slf4j-api:${versions.slf4j}" + runtimeOnly("org.slf4j:slf4j-api:${versions.slf4j}") + runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") + testImplementation project(':test:fixtures:azure-fixture') } @@ -268,10 +270,8 @@ tasks.named("thirdPartyAudit").configure { // from com.ctc.wstx.shaded.msv_core.driver.textui.Driver (woodstox-core) 'com.sun.org.apache.xml.internal.resolver.Catalog', 'com.sun.org.apache.xml.internal.resolver.tools.CatalogResolver', - - 'org.slf4j.impl.StaticLoggerBinder', - 'org.slf4j.impl.StaticMDCBinder', - 'org.slf4j.impl.StaticMarkerBinder', + // [missing classes] SLF4j includes an optional class that depends on an extension class. see Log4jLogger#createConverter + 'org.slf4j.ext.EventData' ) ignoreViolations( @@ -358,7 +358,7 @@ testClusters.matching { it.name == "yamlRestTest" }.configureEach { setting 'azure.client.integration_test.endpoint_suffix', azureAddress String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0) - def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').forUseAtConfigurationTime().isPresent() ? PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT + def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').isPresent() ? PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), ignoreTestSeed } } diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 b/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 new file mode 100644 index 0000000000000..894ed8d886c3f --- /dev/null +++ b/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 @@ -0,0 +1 @@ +84692d456bcce689355d33d68167875e486954dd \ No newline at end of file diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt b/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt new file mode 100644 index 0000000000000..6279e5206de13 --- /dev/null +++ b/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
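For context on the repository-azure build.gradle change above: moving slf4j-api to runtimeOnly and adding log4j-slf4j-impl binds the SLF4J calls made inside the Azure SDK to Elasticsearch's Log4j 2 configuration at runtime, which is also why the org.slf4j.impl.Static*Binder entries drop out of the thirdPartyAudit missing-class list while org.slf4j.ext.EventData is added to it. Below is a minimal sketch of that routing, assuming both jars are on the runtime classpath; the class is illustrative and not part of this change:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative stand-in for Azure SDK code that logs through the SLF4J API.
    public class Slf4jRoutingSketch {
        // Compiled against slf4j-api only; at runtime log4j-slf4j-impl supplies
        // the SLF4J binding, so this call is handled by the same Log4j 2
        // appenders that Elasticsearch configures for its own loggers.
        private static final Logger logger = LoggerFactory.getLogger(Slf4jRoutingSketch.class);

        public static void main(String[] args) {
            logger.info("SLF4J call delegated to Log4j 2 via log4j-slf4j-impl");
        }
    }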
diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt b/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt new file mode 100644 index 0000000000000..ea99ef1d4726b --- /dev/null +++ b/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt @@ -0,0 +1,8 @@ + +Apache Log4j SLF4J Binding +Copyright 1999-2017 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + + diff --git a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml index ffe3c4988f051..299183f26d9dc 100644 --- a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml +++ b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml @@ -45,15 +45,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -90,19 +90,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index ff6dbe983d9c4..0f81809b15a20 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -30,7 +30,7 @@ dependencies { api 'com.google.cloud:google-cloud-core:1.95.4' api 'com.google.cloud:google-cloud-core-http:1.95.4' runtimeOnly 'com.google.guava:guava:30.1.1-jre' - api 'com.google.guava:failureaccess:1.0.1' + runtimeOnly 'com.google.guava:failureaccess:1.0.1' api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" diff --git a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml index 7ce4e44dada35..68d61be4983c5 100644 --- a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml +++ b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml @@ -45,15 +45,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -90,19 +90,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml index 60a4133aa58c1..77870697f93ae 100644 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml +++ 
b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml @@ -69,15 +69,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -130,15 +130,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -175,19 +175,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml index 148ac94b709fb..4a62d6183470d 100644 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml +++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml @@ -41,15 +41,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -86,19 +86,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml index 21112bc99defc..e24ff1ad0e559 100644 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml +++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml @@ -41,15 +41,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -86,19 +86,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml index daf5739f6720d..9c332cc7d9301 100644 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml +++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml @@ -41,15 +41,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - 
snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -86,19 +86,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml b/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml index b932f0d53caad..4508dacbfe7e9 100644 --- a/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml +++ b/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml @@ -23,15 +23,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one # Create a first snapshot using the FS repository @@ -48,19 +48,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two # Create a second snapshot diff --git a/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml b/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml index 90aae30bbb5b5..4a73e628dadb7 100644 --- a/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml +++ b/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml @@ -27,7 +27,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "Bâton enflammé" } - do: indices.refresh: {} diff --git a/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml b/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml index 9a052469c2019..6e0b54d91222a 100644 --- a/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml +++ b/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "JR新宿駅の近くにビールを飲みに行こうか" } - do: indices.refresh: {} diff --git a/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml b/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml index b7ecd933b6676..c75a182dad11c 100644 --- a/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml +++ b/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "뿌리가 깊은 나무는" } - do: indices.refresh: {} diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml index 2e6ee7ebd102a..4b664867a39c5 100644 --- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml +++ 
b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml @@ -27,7 +27,7 @@ - do: index: index: phonetic_sample - id: 1 + id: "1" body: { "text": "hello world" } - do: indices.refresh: {} diff --git a/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml b/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml index 0a1f6e2af42bf..bfd9bda2a4836 100644 --- a/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml +++ b/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "我购买了道具和服装" } - do: indices.refresh: {} diff --git a/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml b/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml index 7276b6744dfb5..d09b65296e431 100644 --- a/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml +++ b/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "studenta był" } - do: indices.refresh: {} diff --git a/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml b/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml index ba860729ebf23..f8993414c96b3 100644 --- a/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml +++ b/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml @@ -15,7 +15,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "Ця п'єса у свою чергу рухається по колу." } - do: indices.refresh: {} diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index 55f0292285135..5667de257d867 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -69,14 +69,19 @@ public Collection<Instance> instances() { try { // hack around code messiness in GCE code // TODO: get this fixed - InstanceList instanceList = Access.doPrivilegedIOException(() -> { - Compute.Instances.List list = client().instances().list(project, zoneId); - return list.execute(); + return Access.doPrivilegedIOException(() -> { + String nextPageToken = null; + List<Instance> zoneInstances = new ArrayList<>(); + do { + Compute.Instances.List list = client().instances().list(project, zoneId).setPageToken(nextPageToken); + InstanceList instanceList = list.execute(); + nextPageToken = instanceList.getNextPageToken(); + if (instanceList.isEmpty() == false && instanceList.getItems() != null) { + zoneInstances.addAll(instanceList.getItems()); + } + } while (nextPageToken != null); + return zoneInstances; }); - // assist type inference - return instanceList.isEmpty() || instanceList.getItems() == null ?
Collections.<Instance>emptyList() - : instanceList.getItems(); } catch (IOException e) { logger.warn((Supplier<?>) () -> new ParameterizedMessage("Problem fetching instance list for zone {}", zoneId), e); logger.debug("Full exception:", e); diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a32f54638f8d6..f363b0bd2bc94 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -272,4 +272,17 @@ public void testMetadataServerValues() { List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings); assertThat(dynamicHosts, hasSize(1)); } + + public void testNodesWithPagination() { + Settings nodeSettings = Settings.builder() + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") + .putList(GceSeedHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") + .build(); + mock = new GceInstancesServiceMock(nodeSettings); + List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); + assertEquals("10.240.79.59", dynamicHosts.get(0).getAddress()); + assertEquals("10.240.79.60", dynamicHosts.get(1).getAddress()); + } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java index 889228ac838a6..f2833fda8a0c5 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java @@ -67,7 +67,7 @@ public static String readGoogleApiJsonResponse(String url) throws IOException { private static String readJsonResponse(String url, String urlRoot) throws IOException { // We extract from the url the mock file path we want to use - String mockFileName = Strings.replace(url, urlRoot, ""); + String mockFileName = Strings.replace(url, urlRoot, "").replace("?", "%3F"); URL resource = GceMockUtils.class.getResource(mockFileName); if (resource == null) { diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances new file mode 100644 index 0000000000000..e2fb8b6c21256 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances @@ -0,0 +1,37 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 1", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test1", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.59" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ], + "nextPageToken": "next-token" +} diff --git
a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token new file mode 100644 index 0000000000000..62bd2b2d8f4f8 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token @@ -0,0 +1,36 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 2", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test2", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.60" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ] +} diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml index 51a440142fd5e..28740fc1844ed 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml @@ -4,7 +4,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "num1": 1.0 } - do: indices.refresh: {} diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml index c6d8048b97961..1430aa19e9ecc 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml @@ -4,7 +4,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "num1": 1 } - do: indices.refresh: {} diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml index 385d576ae48e9..faf1aa77ed51b 100644 --- 
a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml @@ -4,7 +4,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "num1": 1 } - do: indices.refresh: {} diff --git a/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml b/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml index bcdc05b4c8612..9657c981977ad 100644 --- a/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml +++ b/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml @@ -11,12 +11,12 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "test": 1 } - do: index: index: test - id: 2 + id: "2" body: { "test": 2 } - do: indices.refresh: {} diff --git a/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml b/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml index c771ba82312a6..89194d162872d 100644 --- a/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml +++ b/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml @@ -9,17 +9,17 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "important_field": "foo" } - do: index: index: test - id: 2 + id: "2" body: { "important_field": "foo foo foo" } - do: index: index: test - id: 3 + id: "3" body: { "important_field": "foo foo" } - do: diff --git a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml index 5aba14690ee18..714a434775dbf 100644 --- a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml +++ b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml @@ -19,14 +19,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" } - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" } - length: { _source.attachment: 4 } - match: { _source.attachment.content: "This is an english text to test if the pipeline works" } @@ -60,14 +60,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3MK" } - do: get: index: test - id: 1 + id: "1" - match: { _source.field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3MK" } - length: { _source.attachment: 1 } - match: { _source.attachment.language: "en" } @@ -94,14 +94,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" } - do: get: index: test - id: 1 + id: "1" - length: { _source.attachment: 4 } - match: { _source.attachment.content: "This is an english text to 
tes" } - match: { _source.attachment.language: "en" } @@ -130,14 +130,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" } - do: get: index: test - id: 1 + id: "1" - length: { _source.attachment: 4 } - match: { _source.attachment.content: "This is an english text to tes" } - match: { _source.attachment.language: "en" } @@ -146,14 +146,14 @@ - do: index: index: test - id: 2 + id: "2" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 18 } - do: get: index: test - id: 2 + id: "2" - length: { _source.attachment: 4 } - match: { _source.attachment.content: "This is an english" } - match: { _source.attachment.language: "en" } @@ -162,14 +162,14 @@ - do: index: index: test - id: 3 + id: "3" pipeline: "my_pipeline" body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 100000000 } - do: get: index: test - id: 3 + id: "3" - length: { _source.attachment: 4 } - match: { _source.attachment.content: "This is an english text to test if the pipeline works" } - match: { _source.attachment.language: "en" } diff --git a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml index 324776bc20f87..b16f8701f6364 100644 --- a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml +++ b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml @@ -22,14 +22,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: 
"0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgADAP7/CQAGAAAAAAAAAAAAAAAEAAAAjAEAAAAAAAAAEAAAjgEAAAEAAAD+////AAAAAIgBAACJAQAAigEAAIsBAAD////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////spcEAg+kMBAAA8BK/AAAAAAABEQABAAEACAAAEwgAAA4AYmpiaoI4gjgAAAAAAAAAAAAAAAAAAAAAAAAMBBYANA4AAOBSAADgUgAAEwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAAAAAAAAAAAALcAAAAAAFAHAAAAAAAAUAcAAMcUAAAAAAAAxxQAAAAAAADHFAAAAAAAAMcUAAAAAAAAxxQAABQAAAAAAAAAAAAAAP////8AAAAA2xQAAAAAAADbFAAAAAAAANsUAAAAAAAA2xQAAAwAAADnFAAADAAAANsUAAAAAAAA3hUAADABAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAVRUAAAIAAABXFQAAAAAAAFcVAAAAAAAAVxUAAAAAAABXFQAAAAAAAFcVAAAAAAAAVxUAACwAAAAOFwAAtgIAAMQZAABaAAAAgxUAABUAAAAAAAAAAAAAAAAAAAAAAAAAxxQAAAAAAADzFAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAIMVAAAAAAAAGRUAAAAAAADHFAAAAAAAAMcUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAPMUAAAAAAAAmBUAABYAAAAZFQAAAAAAABkVAAAAAAAAGRUAAAAAAADzFAAAFgAAAMcUAAAAAAAA8xQAAAAAAADHFAAAAAAAAPMUAAAAAAAAVRUAAAAAAAAAAAAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAABVFQAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAP////8AAAAAgI6XYKZ60QEAAAAAAAAAAP////8AAAAACRUAABAAAAAZFQAAAAAAAAAAAAAAAAAAQRUAABQAAACuFQAAMAAAAN4VAAAAAAAAGRUAAAAAAAAeGgAAAAAAABkVAAAAAAAAHhoAAAAAAAAZFQAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADHFAAAAAAAABkVAAAoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAgxUAAAAAAACDFQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAN4VAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAP////8AAAAA/////wAAAAD/////AAAAAAAAAAAAAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAB4aAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAABQBwAAPQwAAI0TAAA6AQAABwAMAQ8ADQEAAAwEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFRlc3QgZWxhc3RpY3NlYXJjaA0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAABIIAAATCAAA/PgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYWaJVGuQAABhZo3wiGAAIACAAAEwgAAP0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAATIAMZBoATpwpBeqAB+wfC4gsMhBIbCJBSKwiQUjkIkFJJCJBSWwAAAXsMQCGLDEAgyQxAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALgYPABIAAQB8AQ8ACAADAAMAAwAAAAQACAAAAJgAAACeAAAAngAAAJ4AAACeAAAAngAAAJ4AAACeAAAAngAAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAD4CAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAACoAAAANgYAADYGAAAWAAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAC4AAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAaAEAAEgBAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHACAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAMgYAABgAAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAA
EYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAADIGAAAoAgAA2AEAAOgBAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAAA4AQAAWAEAAPgBAAAIAgAAGAIAAFYCAAB+AgAAkAIAAKACAACwAgAAwAIAANACAACAAgAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAAAgAAAAT0oDAFBKAwBRSgMAX0gBBG1IDARuSAwEc0gMBHRIDAQAAAAAQAAAYPH/AgBAAAwQAAAAAAAAAAAGAE4AbwByAG0AYQBsAAAAAgAAABgAQ0oYAF9IAQRhShgAbUgMBHNIDAR0SAkEAAAAAAAAAAAAAAAAAAAAAAAAOgBBIPL/oQA6AAwNAAAAAAAAEAARAFAAbwBsAGkAYwBlACAAcABhAHIAIABkAOkAZgBhAHUAdAAAAAAAVgBpAPP/swBWAAwNAAAAAAAAMAYOAFQAYQBiAGwAZQBhAHUAIABOAG8AcgBtAGEAbAAAABwAF/YDAAA01gYAAQoDbAA01gYAAQUDAABh9gMAAAIACwAAADIAayD0/8EAMgAADQAAAAAAADAGDABBAHUAYwB1AG4AZQAgAGwAaQBzAHQAZQAAAAIADAAAAAAAUEsDBBQABgAIAAAAIQCb6HBP/AAAABwCAAATAAAAW0NvbnRlbnRfVHlwZXNdLnhtbKyRy2rDMBBF94X+g9C22HK6KKXYzqKPXR+L9AMGeWyL2CMhTULy9x07LpQSAoVuBNLMvffMqFwfxkHtMSbnqdKrvNAKyfrGUVfpz81Ldq9VYqAGBk9Y6SMmva6vr8rNMWBSoqZU6Z45PBiTbI8jpNwHJKm0Po7Aco2dCWC30KG5LYo7Yz0xEmc8eei6fMIWdgOr54M8n0hErtXjqW+KqjSEMDgLLKBmqpqzuohDuiDcU/OLLlvIclHO5ql3Id0sCe+ymugaVB8Q+Q1G4TAsQ+LP8xVIRov5ZeYz0b5tncXG290o68hn48XsTwCr/4n+zjTz39ZfAAAA//8DAFBLAwQUAAYACAAAACEApdan58AAAAA2AQAACwAAAF9yZWxzLy5yZWxzhI/PasMwDIfvhb2D0X1R0sMYJXYvpZBDL6N9AOEof2giG9sb69tPxwYKuwiEpO/3qT3+rov54ZTnIBaaqgbD4kM/y2jhdj2/f4LJhaSnJQhbeHCGo3vbtV+8UNGjPM0xG6VItjCVEg+I2U+8Uq5CZNHJENJKRds0YiR/p5FxX9cfmJ4Z4DZM0/UWUtc3YK6PqMn/s8MwzJ5PwX+vLOVFBG43lExp5GKhqC/jU72QqGWq1B7Qtbj51v0BAAD//wMAUEsDBBQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAdGhlbWUvdGhlbWUvdGhlbWVNYW5hZ2VyLnhtbAzMTQrDIBBA4X2hd5DZN2O7KEVissuuu/YAQ5waQceg0p/b1+XjgzfO3xTVm0sNWSycBw2KZc0uiLfwfCynG6jaSBzFLGzhxxXm6XgYybSNE99JyHNRfSPVkIWttd0g1rUr1SHvLN1euSRqPYtHV+jT9yniResrJgoCOP0BAAD//wMAUEsDBBQABgAIAAAAIQBtTVmryAYAAI4aAAAWAAAAdGhlbWUvdGhlbWUvdGhlbWUxLnhtbOxZ3YrbRhS+L/QdhO4d/0n+WeINtmxv2uwmIXbS5nJWHkuTHWmMZrwbEwJ9gkIhLb0p9K6F3gTaN+i7pLTpQ/TMSJZn7HH2hy2E0jUs8vg7Z7455+g7I83dey8T6pzjjBOW9tz6nZrr4DRkM5JGPffpdFzpuA4XKJ0hylLcc1eYu/cOP/3kLjoQMU6wA/YpP0A9NxZicVCt8hCGEb/DFjiF3+YsS5CAr1lUnWXoAvwmtNqo1VrVBJHUdVKUgNtp/PvP4OzRfE5C7B6uvY8oTJEKLgdCmk2kb1yYDJYZRkuFnZ3VJYKveEAz5xzRngsTzdjFFL8UrkMRF/BDz62pP7d6eLeKDgojKvbYanZj9VfYFQazs4aaM4tOy0k9z/da/dK/AlCxixu1R61Rq/SnACgMYaU5F92nP+gOhn6B1UD5pcX3sD1s1g285r+5w7nvy4+BV6Dcv7eDH48DiKKBV6Ac7+/gPa/dCDwDr0A5vrWDb9f6Q69t4BUopiQ920HX/FYzWK+2hMwZvW+Fd31v3G4UzjcoqIayuuQUc5aKfbWWoBcsGwNAAikSJHXEaoHnKIQyDhAlpxlxjkkUQ+EtUMo4DNcatXGtCf/lx1NXKiLoACPNWvICJnxnSPJxeJiRhei5n4NXV4M8XzpHTMQkLGZVTgyL+yiNdIv3P33z9w9fOX/9+uP7N9/mk27juY4f4jT6kqD0QxPAajdhePfd2z9+e/vu+6///OWNxX8/Q6c6fEoSzJ2H+MJ5whJYnGUF+DS7nsU0RkS36KcRRymSs1j8jyB+OvrhClFkwQ0gEjruWQYyYwMeLV8YhCdxthTE4vFBnBjAE8bogGXWKDyQc2lhni7TyD55ttRxTxA6t80doNTI82i5AH0lNpdBjA2ajylKBYpwioUjf2NnGFtW95wQI64nJMwYZ3PhPCfOABFrSKbk1KimjdF9kkBeVjaCkG8jNifPnAGjtlUP8bmJhLsDUQv5KaZG
GI/QUqDE5nKKEqoH/BiJ2EZysspCHTfiAjIdYcqc0QxzbrN5lMF6taQ/AImxp/2ErhITmQlyZvN5jBjTkUN2FsQoWdiwE5LGOvYzfgYlipzHTNjgJ8y8Q+R3yAOIx750PyPYSPflavAU1FWntCkQ+csys+TyCDOjficrOkdYSQ2Iv6HpCUkvFfgtaff/PWk/IWkYM8uKbkvU7a6NjFxTzvsZsd5P97dEfB9uW7oDls3Ix6/cQ7RMH2O4WXbb1//C/b9wu/954d53P9++XG8UGsRbbl3zzbrauid7d+5zQulErCg+5mrzzqEvzcYwKO3UYysun+QWMVzKOxkmMHBRhpSNkzHxBRHxJEYL2OHXXekk4oXriDsLxmHjr4atviWeLpMTNssfWOt1+XCaiwdHYjNe88txeNgQObrV3jyEle4V20g9LK8JSNvrkNAmM0k0LSTa60EZJPVoDkGzkFAruxUWXQuLjnS/TtUOC6BWZgU2Tg5st3qu74EJGMEzFaJ4JvOUp3qdXZXM28z0vmAaFQC7iHUFbDLdlVz3Lk+uLi+1K2TaIKGVm0lCRUb1MB6jGS6qU45ehcZ1c93dpNSgJ0Oh5oPS2tBodz7E4qa5BrttbaCprhQ0dS56bqvpQ8mEaNFz5/DgD5fJAmqHyw0vohG8PgtFlt/wN1GWRcbFEPE4D7gSnVwNEiJw5lCS9Fy5/DINNFUaorjVGyAIHy25LsjKx0YOkm4mGc/nOBR62rURGen8Kyh8rhXWX5X5zcHSki0h3ZN4duGc0mX2BEGJ+e26DOCMcHj/U8+jOSPwQrMUsk39bTWmQnb1N4qqhvJxRBcxKjqKLuY5XEl5SUd9K2OgfSvWDAHVQlI0wtNINlg9qEY3LbtGzmFv173cSEZOE81NzzRURXZNu4oZM6zbwFYsb9bkNVbrEIOm6R0+l+5tye2utW5rn1B2CQh4GT9L171CQ9CobSYzqEnGuzIsNbsYNXvHeoGXULtKk9BUv7V2uxW3skdYp4PBG3V+sNuuWhiar/eVKtLq6EM/nGCnL0A8hvAaeEkFV6mEo4cMwYZoovYkuWzALfJSFLcGXDnLjPTcVzW/7wUNP6jUOv6o4jW9WqXj95uVvu836yO/XhsOGq+hsYg4qfv5scsYXkTRVXH4osZ3DmCS9bu2OyFLqkydrFQVcXUAU28YBzD5yYszlQcsrkNAdF61GuNusztoVbrN/rjiDQedSjdoDSrDVtAejoeB3+mOX7vOuQJ7/WbgtUadSqseBBWvVZP0O91K22s0+l673xl5/dfFNgZWnstHEQsIr+J1+A8AAAD//wMAUEsDBBQABgAIAAAAIQAN0ZCftgAAABsBAAAnAAAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzhI9NCsIwFIT3gncIb2/TuhCRJt2I0K3UA4TkNQ02PyRR7O0NriwILodhvplpu5edyRNjMt4xaKoaCDrplXGawW247I5AUhZOidk7ZLBggo5vN+0VZ5FLKE0mJFIoLjGYcg4nSpOc0IpU+YCuOKOPVuQio6ZByLvQSPd1faDxmwF8xSS9YhB71QAZllCa/7P9OBqJZy8fFl3+UUFz2YUFKKLGzOAjm6pMBMpburrE3wAAAP//AwBQSwECLQAUAAYACAAAACEAm+hwT/wAAAAcAgAAEwAAAAAAAAAAAAAAAAAAAAAAW0NvbnRlbnRfVHlwZXNdLnhtbFBLAQItABQABgAIAAAAIQCl1qfnwAAAADYBAAALAAAAAAAAAAAAAAAAAC0BAABfcmVscy8ucmVsc1BLAQItABQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAAAAAAAAAAAAAABYCAAB0aGVtZS90aGVtZS90aGVtZU1hbmFnZXIueG1sUEsBAi0AFAAGAAgAAAAhAG1NWavIBgAAjhoAABYAAAAAAAAAAAAAAAAA0wIAAHRoZW1lL3RoZW1lL3RoZW1lMS54bWxQSwECLQAUAAYACAAAACEADdGQn7YAAAAbAQAAJwAAAAAAAAAAAAAAAADPCQAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzUEsFBgAAAAAFAAUAXQEAAMoKAAAAADw/eG1sIHZlcnNpb249IjEuMCIgZW5jb2Rpbmc9IlVURi04IiBzdGFuZGFsb25lPSJ5ZXMiPz4NCjxhOmNsck1hcCB4bWxuczphPSJodHRwOi8vc2NoZW1hcy5vcGVueG1sZm9ybWF0cy5vcmcvZHJhd2luZ21sLzIwMDYvbWFpbiIgYmcxPSJsdDEiIHR4MT0iZGsxIiBiZzI9Imx0MiIgdHgyPSJkazIiIGFjY2VudDE9ImFjY2VudDEiIGFjY2VudDI9ImFjY2VudDIiIGFjY2VudDM9ImFjY2VudDMiIGFjY2VudDQ9ImFjY2VudDQiIGFjY2VudDU9ImFjY2VudDUiIGFjY2VudDY9ImFjY2VudDYiIGhsaW5rPSJobGluayIgZm9sSGxpbms9ImZvbEhsaW5rIi8+AAAAABMAAAAUAAAOAAAIAP////8ACAAAEwgAAAUAAAAACAAAEwgAAAYAAAAAAAAABQAAABIAAAAVAAAABwAEAAcAAAAAABIAAAAVAAAABAAHAAQAAAAEAAAACAAAAOUAAAAAAAAAAwAAAN8IhgCkF6oAlUa5AH419AAAAAAAEwAAABUAAAAAAAAAAQAAAP9AAIABABIAAAASAAAAAEBDewEAAQASAAAAAAAAABIAAAAAAAAAAAAAAAAAAAACEAAAAAAAAAATAAAAoAAAEABAAAD//wEAAAAHAFUAbgBrAG4AbwB3AG4A//8BAAgAAAAAAAAAAAAAAP//AQAAAAAA//8AAAIA//8AAAAA//8AAAIA//8AAAAABQAAAEcOkAEAAAICBgMFBAUCAwTvKgDgQXgAwAkAAAAAAAAA/wEAAAAAAABUAGkAbQBlAHMAIABOAGUAdwAgAFIAbwBtAGEAbgAAADUOkAECAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAgAAAAABTAHkAbQBiAG8AbAAAADMOkAEAAAILBgQCAgICAgT/KgDgQ3gAwAkAAAAAAAAA/wEAAAAAAABBAHIAaQBhAGwAAAA3DpABAAACDwUCAgIEAwIE/wIA4P+sAEABAAAAAAAAAJ8BAAAAAAAAQwBhAGwAaQBiAHIAaQAAAEESkAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABDAGEAbQBiAHIAaQBhACAATQBhAHQAaAAAACAABADxCIgIAPDEAgAAqQEAAAAAWVJDh1lSQ4cAAAAAAgABAAAAAgAAABEAAAABAAEAAAAEAAOQAQAAAAIAAAARAAAAAQABAAAAAQAAAAAAAAAhAwDwEAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAClBsAHtAC0AIGBcjAAAAAAAAAAAAAAAAAAABIAAAASAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAABAAAAA8BAACAD8/QEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACSFAAAAAACfH/DwAAJFAAABAnAAD///9/////f////3////9/////f////3////9/3wiGAAAEAAAyAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQQAAAAAAAAAAAAAAAAAAAAAAAAQHAAABAAAAAAAAAAAAHgAAAB4AAAAAAAAAAAAAACgBQAAGkjOCAsAAAAAAAAA3AAAAAEAAAD//xIAAAAAAAAAAAAAAAAAAAAMAEQAYQB2AGkAZAAgAFAAaQBsAGEAdABvAAwARABhAHYAaQBkACAAUABpAGwAYQB0AG8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP7/AAADCgEAAAAAAAAAAAAAAAAAAAAAAAEAAADghZ/y+U9oEKuRCAArJ7PZMAAAANzSAgASAAAAAQAAAJgAAAACAAAAoAAAAAMAAACsAAAABAAAALgAAAAFAAAA0AAAAAYAAADcAAAABwAAAOgAAAAIAAAA/AAAAAkAAAAUAQAAEgAAACABAAAKAAAARAEAAAwAAABQAQAADQAAAFwBAAAOAAAAaAEAAA8AAABwAQAAEAAAAHgBAAATAAAAgAEAABEAAACIAQAAAgAAABAnAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAAEAAAAERhdmlkIFBpbGF0bwAAAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAADAAAAE5vcm1hbC5kb3RtAB4AAAAQAAAARGF2aWQgUGlsYXRvAAAAAB4AAAAEAAAAMgAAAB4AAAAcAAAATWljcm9zb2Z0IE1hY2ludG9zaCBXb3JkAAAAAEAAAAAARsMjAAAAAEAAAAAAFjZWpnrRAUAAAAAAFjZWpnrRAQMAAAABAAAAAwAAAAIAAAADAAAAEQAAAAMAAAAAAAAARwAAAEzRAgD/////DgAAAAEAAABsAAAAAAAAAAAAAAD/AAAAswAAAAAAAAAAAAAAZhkAANsRAAAgRU1GAAABAETRAgAIAAAAAQAAAAAAAAAAAAAAAAAAAOwEAACxAwAAQAEAAPAAAAAAAAAAAAAAAAAAAAAA4gQAgKkDABEAAAAMAAAACAAAAAoAAAAQAAAAAAAAAAAAAAAJAAAAEAAAAAABAAC0AAAADAAAABAAAAAAAAAAAAAAAAsAAAAQAAAAAAEAALQAAABRAAAAeNACAAAAAAAAAAAA/wAAALMAAAAAAAAAAAAAAAAAAAAAAAAAAAEAALQAAABQAAAAKAAAAHgAAAAA0AIAAAAAACAAzAAAAQAAtAAAACgAAAAAAQAAtAAAAAEAIAAAAAAAANACAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////vr6+/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/76+vv////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////7vf//+rz7v/Yzc3/0NLY/+DX2f/N4PL/3tXI/8jV4v/Q0cX/1tDI/9ve2f/U0tX/0NLQ/83I0P/I2N7/4tnI/9LZ4v/v6tz/5eXl////9////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////83g9//e3M3/vrG3/8TCxv/Xwrz/vdfu/8W/rv/K1tX/x8bB/8LJxv/Oxb7/yMTE/8vCwv+3scH/zd7Z/9DNyP/BwcT/z97X/82xq/////v////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////u9/v/+/Lu////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
[… remainder of the base64-encoded Word document fixture elided for readability …]AAAAAAAAAAAAAAAA==" }
  - do:
      get:
        index: test
-        id: 1
+        id: "1"
  - length: { _source.attachment: 8 }
  - match: { _source.attachment.content: "Test elasticsearch" }
  - match: { _source.attachment.language: "et" }
@@ -65,14 +65,14 @@
  - do:
      index:
        index: test
-        id: 1
+        id: "1"
        pipeline: "my_pipeline"
        body: { field1: "UEsDBBQABgAIAAAAIQBtiidLZgEAAFQFAAATAAgC[… base64-encoded .docx fixture elided for readability …]" }
  - do:
      get:
        index: test
-        id: 1
+        id: "1"
  - length: { _source.attachment: 8 }
  - match: { _source.attachment.content: "Test elasticsearch" }
  - match: { _source.attachment.language: "et" }
diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
index a876a4babe328..6ef35999307c4 100644
--- a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
+++ b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
@@ -91,13 +91,13 @@
  - do:
      index:
        index: annotated
-        id: 1
+        id: "1"
        body:
          "my_field" : "[A](~MARK0&~MARK0) [B](~MARK1)"
  - do:
      index:
        index: annotated
-        id: 2
+        id: "2"
        body:
          "my_field" : "[A](~MARK0) [C](~MARK2)"
        refresh: true
@@ -157,13 +157,13 @@
  - do:
      index:
        index: annotated
-        id: 1
+        id: "1"
        body:
          "my_field" : "[Jeff Beck](Beck) plays a strat"
  - do:
      index:
        index: annotated
-        id: 2
+        id: "2"
        body:
          "my_field" : "[Kimchy](Beck) plays a strat"
        refresh: true
@@ -190,19 +190,19 @@
  - do:
      index:
        index: annotated
-        id: 1
+        id: "1"
        body:
          "my_field" : "[Apple](Apple+Inc) launched the iphone 12"
  - do:
      index:
        index: annotated
-        id: 2
+        id: "2"
        body:
          "my_field" : "[They](Apple+Inc) make iphone accessories"
  - do:
      index:
        index: annotated
-        id: 3
+        id: "3"
        body:
          "my_field" : "[Apple](Apple+Inc) have a new iphone coming"
        refresh: true
diff --git a/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml b/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
index 3ed6e6a97c2c2..f82de6fc21110 100644
--- a/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
+++ b/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
@@ -12,7 +12,7 @@ setup:
  - do:
      index:
        index: test
-        id: 0
+        id: "0"
        body: { "foo": null }

  - do:
@@ -30,25 +30,25 @@ setup:
  - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }

  - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "foo": "baz" }

  - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "foo": "quux" }

  - do:
      index:
        index: test
-        id: 4
+        id: "4"
        body: { "foo": "bar" }

  - do:
@@ -67,7 +67,7 @@
  - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "foo" }

  - do:
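The id hunks above and below this point all make the same mechanical change: document ids in the YAML REST tests become explicit strings. A likely motivation (an inference, not stated in the diff itself) is that YAML resolves a bare scalar such as 1 to an integer, while Elasticsearch always returns _id as a string, so a strict YAML parser or an exact comparison in the test runner can trip over the type mismatch. A minimal sketch of the distinction, using SnakeYAML purely for illustration (the class name and parser choice are assumptions, not part of this change):

import java.util.Map;
import org.yaml.snakeyaml.Yaml;

public class YamlIdQuotingSketch {
    public static void main(String[] args) {
        Yaml yaml = new Yaml();

        // A bare scalar is resolved to an Integer under YAML's core schema rules.
        Map<String, Object> unquoted = yaml.load("id: 1");
        System.out.println(unquoted.get("id").getClass()); // class java.lang.Integer

        // A quoted scalar stays a String, matching the _id that
        // Elasticsearch returns in its responses.
        Map<String, Object> quoted = yaml.load("id: \"1\"");
        System.out.println(quoted.get("id").getClass()); // class java.lang.String
    }
}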
diff --git a/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml b/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
index bc33818e10e62..434368ed2f5b2 100644
--- a/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
+++ b/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
@@ -14,7 +14,7 @@
  - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }

  - do:
@@ -23,7 +23,7 @@
  - do:
      get:
        index: test
-        id: 1
+        id: "1"
        stored_fields: "_size"

  - gt: { _size: 0 }
diff --git a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
index fbbdcb8f153e0..0d91a931519ed 100644
--- a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
+++ b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
@@ -10,13 +10,13 @@
  - do:
      index:
        index: smb-test
-        id: 1
+        id: "1"
        body: { foo: bar }

  - do:
      get:
        index: smb-test
-        id: 1
+        id: "1"

  - match: { _index: smb-test }
  - match: { _id: "1"}
diff --git a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
index c1db9d77f61dd..6dec927308c32 100644
--- a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
+++ b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
@@ -31,7 +31,6 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.index.IndexRequest;
@@ -114,7 +113,7 @@ static List parseHosts(String props) {
     public static void configureRemoteClusters(List remoteNodes) throws Exception {
         assertThat(remoteNodes, hasSize(3));
         final String remoteClusterSettingPrefix = "cluster.remote." + CLUSTER_ALIAS + ".";
-        try (RestHighLevelClient localClient = newLocalClient()) {
+        try (RestClient localClient = newLocalClient().getLowLevelClient()) {
             final Settings remoteConnectionSettings;
             if (randomBoolean()) {
                 final List seeds = remoteNodes.stream()
@@ -137,13 +136,9 @@ public static void configureRemoteClusters(List remoteNodes) throws Except
                     .put(remoteClusterSettingPrefix + "proxy_address", proxyNode.transportAddress)
                     .build();
             }
-            assertTrue(
-                localClient.cluster()
-                    .putSettings(new ClusterUpdateSettingsRequest().persistentSettings(remoteConnectionSettings), RequestOptions.DEFAULT)
-                    .isAcknowledged()
-            );
+            updateClusterSettings(localClient, remoteConnectionSettings);
             assertBusy(() -> {
-                final Response resp = localClient.getLowLevelClient().performRequest(new Request("GET", "/_remote/info"));
+                final Response resp = localClient.performRequest(new Request("GET", "/_remote/info"));
                 assertOK(resp);
                 final ObjectPath objectPath = ObjectPath.createFromResponse(resp);
                 assertNotNull(objectPath.evaluate(CLUSTER_ALIAS));
@@ -172,7 +167,7 @@ static int indexDocs(RestHighLevelClient client, String index, int numDocs) thro
     }

     void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs, Integer preFilterShardSize) {
-        try (RestHighLevelClient localClient = newLocalClient()) {
+        try (RestClient localClient = newLocalClient().getLowLevelClient()) {
             Request request = new Request("POST", "/_search");
             final int expectedDocs;
             if (randomBoolean()) {
@@ -193,7 +188,7 @@ void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int r
             }
             int size = between(1, 100);
             request.setJsonEntity("{\"sort\": \"f\", \"size\": " + size + "}");
-            Response response = localClient.getLowLevelClient().performRequest(request);
+            Response response = localClient.performRequest(request);
             try (
                 XContentParser parser = JsonXContent.jsonXContent.createParser(
                     NamedXContentRegistry.EMPTY,
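The SearchStatesIT hunks above drop the deprecated RestHighLevelClient in favour of the low-level RestClient, which sends plain HTTP requests and is therefore not coupled to any particular server version, a useful property in a rolling-upgrade test. A minimal standalone sketch of the low-level pattern (host, port and request body are placeholder values, not taken from the test):

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class LowLevelClientSketch {
    public static void main(String[] args) throws Exception {
        // The low-level client is built from a host; it knows nothing about the server's Java API.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Endpoint and body are plain strings, so the same call works
            // against any cluster version that supports the HTTP API.
            Request request = new Request("POST", "/_search");
            request.setJsonEntity("{\"sort\": \"f\", \"size\": 10}");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}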
diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index 7f9d1e57adeb7..9fa45187f90ff 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -31,6 +33,8 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> numberOfNodes = 4 setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { @@ -41,16 +45,10 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> // Getting the endpoints causes a wait for the cluster println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" println "Upgrading one node to create a mixed cluster" - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-0".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" println "Upgrading another node to create a mixed cluster" - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-1".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 818cd917c594a..3f9ba25394649 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask @@ -26,9 +28,7 @@ tasks.register('remote-cluster', RestIntegTestTask) { } testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } testClusters.matching{ it.name == 'remote-cluster' }.configureEach { diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index 93e1da8c52b9d..df03b37c5a603 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -65,7 +65,7 @@ tasks.named("preProcessFixture").configure { dockerCompose { tcpPortsToIgnoreWhenWaiting = [9600, 9601] - if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').forUseAtConfigurationTime().getOrElse('default'))) { + if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').getOrElse('default'))) { useComposeFiles = ['docker-compose.yml'] } else { useComposeFiles = ['docker-compose-oss.yml'] } diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java index c5bee9694a275..c778d6fe4c512 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java @@ -9,8 +9,7 @@ import org.apache.http.HttpHost; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; @@ -28,6 +27,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; +import java.util.function.Consumer; @SuppressWarnings("removal") public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase { @@ -58,8 +58,12 @@ public void initClientsAndConfigureClusters() throws Exception { cluster1Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-1.tcp.9200")); cluster2Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-2.tcp.9200")); - cluster1Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT); - cluster2Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT); + Consumer<Request> waitForYellowRequest = request -> { + request.addParameter("wait_for_status", "yellow"); + request.addParameter("wait_for_nodes", "1"); + }; + ensureHealth(cluster1Client().getLowLevelClient(), waitForYellowRequest); + ensureHealth(cluster2Client().getLowLevelClient(), waitForYellowRequest); initialized = true; } diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java index 6e4d83873f9df..78aa2b7e1c5de 100644 ---
a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java @@ -7,14 +7,12 @@ */ package org.elasticsearch.cluster.remote.test; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RequestOptions; -import org.elasticsearch.client.cluster.RemoteConnectionInfo; -import org.elasticsearch.client.cluster.RemoteInfoRequest; +import org.elasticsearch.client.RestClient; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentFactory; @@ -22,6 +20,8 @@ import org.junit.Before; import java.io.IOException; +import java.util.Map; +import java.util.Optional; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assume.assumeThat; @@ -74,27 +74,22 @@ public void clearIndices() throws IOException { @After public void clearRemoteClusterSettings() throws IOException { - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().putNull("cluster.remote.*").build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); - assertTrue(cluster2Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings setting = Settings.builder().putNull("cluster.remote.*").build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), setting); + updateClusterSettings(cluster2Client().getLowLevelClient(), setting); } public void testProxyModeConnectionWorks() throws IOException { String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300"; logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.cluster2.mode", "proxy") - .put("cluster.remote.cluster2.proxy_address", cluster2RemoteClusterSeed) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.cluster2.mode", "proxy") + .put("cluster.remote.cluster2.proxy_address", cluster2RemoteClusterSeed) + .build(); + + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, @@ -105,33 +100,25 @@ public void testProxyModeConnectionWorks() throws IOException { public void testSniffModeConnectionFails() throws IOException { String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300"; logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.cluster2alt.mode", "sniff") - .put("cluster.remote.cluster2alt.seeds", 
cluster2RemoteClusterSeed) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.cluster2alt.mode", "sniff") + .put("cluster.remote.cluster2alt.seeds", cluster2RemoteClusterSeed) + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertFalse(rci.isConnected()); + assertFalse(isConnected(cluster1Client().getLowLevelClient())); } public void testHAProxyModeConnectionWorks() throws IOException { String proxyAddress = "haproxy:9600"; logger.info("Configuring remote cluster [{}]", proxyAddress); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxynosn.mode", "proxy") - .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxynosn.mode", "proxy") + .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, @@ -142,18 +129,14 @@ public void testHAProxyModeConnectionWorks() throws IOException { public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException { assumeThat("test is only supported if the distribution contains xpack", getDistribution(), equalTo("default")); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxysni1.mode", "proxy") - .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") - .put("cluster.remote.haproxysni1.server_name", "application1.example.com") - .build() - ); - assertTrue(cluster2Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxysni1.mode", "proxy") + .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") + .put("cluster.remote.haproxysni1.server_name", "application1.example.com") + .build(); + updateClusterSettings(cluster2Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster2Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster2Client().getLowLevelClient())); assertEquals( 1L, @@ -164,22 +147,30 @@ public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException { assumeThat("test is only supported if the distribution contains xpack", getDistribution(), equalTo("default")); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxysni2.mode", "proxy") - 
.put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") - .put("cluster.remote.haproxysni2.server_name", "application2.example.com") - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxysni2.mode", "proxy") + .put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") + .put("cluster.remote.haproxysni2.server_name", "application2.example.com") + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, cluster1Client().search(new SearchRequest("haproxysni2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value ); } + + @SuppressWarnings("unchecked") + private boolean isConnected(RestClient restClient) throws IOException { + Optional remoteConnectionInfo = getAsMap(restClient, "/_remote/info").values().stream().findFirst(); + if (remoteConnectionInfo.isPresent()) { + logger.info("Connection info: {}", remoteConnectionInfo); + if (((Map) remoteConnectionInfo.get()).get("connected")instanceof Boolean connected) { + return connected; + } + } + return false; + } } diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index b5c4d5b99f3fc..f1e71b439fdd6 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -36,9 +38,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } String oldVersion = bwcVersion.toString() @@ -60,9 +60,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> dependsOn "${baseName}#oldClusterTest" useCluster baseCluster doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-0".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } systemProperty 'tests.rest.suite', 'mixed_cluster' @@ -76,9 +73,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> dependsOn "${baseName}#oneThirdUpgradedTest" useCluster baseCluster doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-1".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } systemProperty 'tests.rest.suite', 'mixed_cluster' @@ -91,9 +85,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#upgradedClusterTest", StandaloneRestIntegTestTask) { dependsOn "${baseName}#twoThirdsUpgradedTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - 
baseCluster.get().nodes."${baseName}-2".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } useCluster testClusters.named(baseName) diff --git a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml index 7a0cdcbef0786..ed3c5f6f9228c 100644 --- a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml +++ b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml @@ -73,7 +73,7 @@ catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./ index: index: test - id: 1 + id: "1" pipeline: "my_pipeline_1" body: { field1: "1", diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml index e6a2a3d52e116..a8f7e1e5877c8 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml @@ -30,14 +30,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline_1" body: {} - do: get: index: test - id: 1 + id: "1" - length: { _source: 2 } - match: { _source.index_type_id: "test/1" } - match: { _source.metadata: ["test", "1"] } @@ -108,7 +108,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline_1" body: { metadata: "0", @@ -120,7 +120,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 5 } - match: { _source.field1: "1" } - match: { _source.field2: "2" } @@ -131,7 +131,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline_2" body: { field1: "field2" @@ -140,7 +140,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 2 } - match: { _source.field1: "field2" } - match: { _source.field2: "value" } @@ -148,7 +148,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline_3" body: { field_to_remove: "field2", @@ -158,7 +158,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 1 } - match: { _source.field_to_remove: "field2" } @@ -196,7 +196,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_handled_pipeline" body: { do_nothing: "foo", @@ -205,7 +205,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 2 } - match: { _source.do_nothing: "foo" } - match: { _source.error: "processor first_processor [remove]: field [field_to_remove] not present as part of path [field_to_remove]" } @@ -236,7 +236,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "_id" body: { values_flat : [], @@ -257,7 +257,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 2 } - match: { _source.values_flat: ["foo_bar", "foo_baz"] } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml index 14a1c71bed52d..9a7444c4ffc6c 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml 
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml @@ -43,7 +43,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "_id" body: { log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" @@ -52,7 +52,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 13 } - match: { _source.request: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" } - match: { _source.agent: "\"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" } @@ -101,7 +101,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "_id" body: { log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" @@ -110,7 +110,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 7 } - match: { _source.url.original: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" } - match: { _source.user_agent.original: "Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36" } @@ -187,7 +187,7 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "_id" body: { "age" : 33, @@ -227,7 +227,7 @@ - do: get: index: test - id: 1 + id: "1" - length: { _source: 11 } - is_false: _source.friends.0.id - is_false: _source.friends.1.id diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml index 5ba68cb932a17..255918261a896 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml @@ -18,7 +18,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml index 61f290f91bc42..3fca85dbe1eeb 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml @@ -18,7 +18,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git 
a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml index eaf6b24030a06..6b7114d572ac2 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml @@ -22,14 +22,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { bytes_in: 1234, bytes_out: 4321 } - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_in: 1234 } - match: { _source.bytes_out: 4321 } - match: { _source.bytes_total: 55550 } @@ -70,14 +70,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { bytes_in: 1234, bytes_out: 4321 } - do: get: index: test - id: 1 + id: "1" - match: { _source.bytes_in: 1234 } - match: { _source.bytes_out: 4321 } - match: { _source.bytes_total: 5555 } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml index 0f514f2213492..90c5f5f9a837a 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml @@ -31,7 +31,7 @@ - do: index: index: timetest - id: 1 + id: "1" pipeline: "my_timely_pipeline" body: {} diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index 368e215235512..26fab744c13b6 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' @@ -27,10 +29,8 @@ testClusters.matching { it.name == "integTest" }.configureEach { testClusters.configureEach { setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("integTest").configure { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 71e735de95476..8a8630c9846c6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -22,7 +23,7 @@ configurations { // easy and efficient basicRestSpecs { attributes { - attribute(org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) } } } @@ -35,10 +36,8 @@ artifacts { testClusters.configureEach { module ':modules:mapper-extras' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("test").configure { enabled = false } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json index f1f5e3992ab4a..934ef3daa44aa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json @@ -63,6 +63,14 @@ "type":"boolean", "default":false, "description":"Indicates whether unmapped fields should be included in the response." + }, + "filters": { + "type":"list", + "description":"An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent" + }, + "types": { + "type": "list", + "description":"Only return results for fields that have one of the types in the list" } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json new file mode 100644 index 0000000000000..272f2264292d3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json @@ -0,0 +1,45 @@ +{ + "ml.get_memory_stats":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-memory.html", + "description":"Returns information on how ML is using memory." + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_ml/memory/_stats", + "methods":[ + "GET" + ] + }, + { + "path":"/_ml/memory/{node_id}/_stats", + "methods":[ + "GET" + ], + "parts":{ + "node_id":{ + "type":"string", + "description":"Specifies the node or nodes to retrieve stats for." 
+ } + } + } + ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json new file mode 100644 index 0000000000000..969f5e350705f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json @@ -0,0 +1,28 @@ +{ + "security.oidc_authenticate":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-authenticate.html", + "description":"Exchanges an OpenID Connect authentication response message for an Elasticsearch access token and refresh token pair" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/authenticate", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The OpenID Connect response to authenticate", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json new file mode 100644 index 0000000000000..83f16e5c4de6c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json @@ -0,0 +1,28 @@ +{ + "security.oidc_logout":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-logout.html", + "description":"Invalidates a refresh token and access token that were generated from the OpenID Connect Authenticate API" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/logout", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"Access token and refresh token to invalidate", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json new file mode 100644 index 0000000000000..fed4897037435 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json @@ -0,0 +1,28 @@ +{ + "security.oidc_prepare_authentication":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-prepare-authentication.html", + "description":"Creates an OAuth 2.0 authentication request as a URL string" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/prepare", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The OpenID Connect authentication realm configuration", + "required":true + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml index 87d3d237d42cb..6cec656e543ed 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml +++
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml @@ -3,7 +3,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _version: 1} - set: { _seq_no: seqno } @@ -14,7 +14,7 @@ body: - index: _index: test_1 - _id: 1 + _id: "1" if_seq_no: 10000 if_primary_term: $primary_term - foo: bar2 @@ -28,7 +28,7 @@ body: - index: _index: test_1 - _id: 1 + _id: "1" if_seq_no: $seqno if_primary_term: $primary_term - foo: bar2 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml index 7a6a29032cf74..9cbec3e33e589 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml @@ -24,7 +24,7 @@ - do: index: index: index1 - id: 1 + id: "1" body: { foo: bar } refresh: true @@ -39,7 +39,7 @@ - do: index: index: index2 - id: 1 + id: "1" body: { foo: bar } refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml index c00837e47f81c..a8c0808782272 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml @@ -14,7 +14,7 @@ - do: index: index: index1 - id: 1 + id: "1" body: { foo: bar } refresh: true - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml index a8d40276c9800..22b0eb48c8877 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml @@ -5,7 +5,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml index 66b0699a184d2..7b109cd104a10 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml @@ -12,7 +12,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml index f69e3600a43d3..f114805207787 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml @@ -4,7 +4,7 @@ - do: create: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _index: test_1 } @@ -14,7 +14,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _index: test_1 } - match: { _id: "1"} @@ -25,5 +25,5 @@ catch: conflict create: index: test_1 - id: 1 + id: "1" body: { foo: bar } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml index 86d0d4b59e06b..65f81b3247bd1 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml @@ -5,7 +5,7 @@ catch: bad_request create: index: test - id: 1 + id: "1" body: { foo: bar } version_type: external version: 0 @@ -18,7 +18,7 @@ catch: bad_request create: index: test - id: 2 + id: "2" body: { foo: bar } version_type: external version: 5 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml index 6fb845f4fa869..5731da79a04fd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml @@ -18,14 +18,14 @@ - do: create: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: get: index: test_1 - id: 1 + id: "1" routing: "5" stored_fields: [_routing] @@ -36,5 +36,5 @@ catch: missing get: index: test_1 - id: 1 + id: "1" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml index d9c4ab4602d1c..2659b4650f14f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml @@ -13,7 +13,7 @@ - do: create: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -21,14 +21,14 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 0 } - do: create: index: test_1 - id: 2 + id: "2" refresh: true body: { foo: bar } - is_true: forced_refresh @@ -38,7 +38,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 2 }} + query: { term: { _id: "2" }} - match: { hits.total: 1 } @@ -48,7 +48,7 @@ - do: create: index: test_1 - id: 1 + id: "1" refresh: "" body: { foo: bar } - is_true: forced_refresh @@ -58,7 +58,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml index cc9a82cbcbc9e..858ba8840c83f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml @@ -18,7 +18,7 @@ setup: - do: create: index: test_1 - id: 1 + id: "1" body: "nested1" : [ { "foo": "bar" }, { "foo": "bar2" } ] - match: { _version: 1} @@ -30,6 +30,6 @@ setup: catch: /The number of nested documents has exceeded the allowed limit of \[2\]. 
This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./ create: index: test_1 - id: 1 + id: "1" body: "nested1" : [ { "foo": "bar" }, { "foo": "bar2" }, { "foo": "bar3" } ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml index f58f1435046fc..d656aff035916 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _version: 1 } @@ -14,6 +14,6 @@ - do: delete: index: test_1 - id: 1 + id: "1" - match: { _version: 2 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml index fea1779b99d21..4f7493c42f61e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml @@ -18,13 +18,13 @@ - do: index: index: foobar - id: 1 + id: "1" body: { foo: bar } - do: delete: index: foobar - id: 1 + id: "1" - match: { _index: foobar } - match: { _id: "1"} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml index dba565179cded..081477532508c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml @@ -6,13 +6,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: delete: index: test_1 - id: 1 + id: "1" - match: { result: deleted } @@ -20,6 +20,6 @@ catch: missing delete: index: test_1 - id: 1 + id: "1" - match: { result: not_found } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml index a739e3f53cd44..04f009da788b6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _seq_no: 0 } @@ -15,14 +15,14 @@ catch: conflict delete: index: test_1 - id: 1 + id: "1" if_seq_no: 2 if_primary_term: 1 - do: delete: index: test_1 - id: 1 + id: "1" if_seq_no: 0 if_primary_term: 1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml index e076dbded6f0c..54e5df1f2ed8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 5 @@ -17,14 +17,14 @@ catch: conflict delete: index: test_1 - id: 1 + id: "1" version_type: external version: 4 - do: delete: index: test_1 - id: 1 + id: "1" version_type: external version: 6 diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml index 03adef4a75fa9..e85c61a436bb0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external_gte version: 5 @@ -17,14 +17,14 @@ catch: conflict delete: index: test_1 - id: 1 + id: "1" version_type: external_gte version: 4 - do: delete: index: test_1 - id: 1 + id: "1" version_type: external_gte version: 6 @@ -33,7 +33,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external_gte version: 6 @@ -43,7 +43,7 @@ - do: delete: index: test_1 - id: 1 + id: "1" version_type: external_gte version: 6 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml index 2bbafe5e04416..122f29a402646 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml @@ -12,7 +12,7 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } @@ -20,12 +20,12 @@ catch: missing delete: index: test_1 - id: 1 + id: "1" routing: "4" - do: delete: index: test_1 - id: 1 + id: "1" routing: "5" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml index 3aea111f07a66..6577f9a6cebf0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml @@ -21,7 +21,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } refresh: true @@ -31,7 +31,7 @@ - do: index: index: test_1 - id: 3 + id: "3" body: { foo: bar } refresh: true - is_true: forced_refresh @@ -48,7 +48,7 @@ - do: delete: index: test_1 - id: 1 + id: "1" - do: search: @@ -62,7 +62,7 @@ - do: delete: index: test_1 - id: 3 + id: "3" refresh: true # If a replica shard where doc 1 is located gets initialized at this point, doc 1 @@ -86,7 +86,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } refresh: true - is_true: forced_refresh @@ -96,13 +96,13 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 1 } - do: delete: index: test_1 - id: 1 + id: "1" refresh: "" - do: @@ -110,7 +110,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 0 } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml index ae4e61c075ae8..991bd12bc989f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml @@ -7,7 +7,7 @@ catch: missing delete: index: test_1 - id: 1 + id: "1" --- "Missing document with ignore": @@ -17,5 +17,5 @@ - do: delete: index: test_1 - id: 1 + id: "1" ignore: 404 diff 
--git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml index 141e2898bb21b..e1e366b0a4059 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml @@ -5,14 +5,14 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_false: '' - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - is_true: '' @@ -20,14 +20,14 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_true: '' - do: exists: index: test_1 - id: 1 + id: "1" version: 1 - is_true: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml index 039eeb87d1f0e..e23a71ae42301 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml @@ -19,14 +19,14 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: exists: index: test_1 - id: 1 + id: "1" routing: "5" - is_true: '' @@ -34,6 +34,6 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_false: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml index 6aebaa78b8a0a..3e60cbbf4968f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml @@ -18,13 +18,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: exists: index: test_1 - id: 1 + id: "1" realtime: false - is_false: '' @@ -32,7 +32,7 @@ - do: exists: index: test_1 - id: 1 + id: "1" realtime: true - is_true: '' @@ -40,7 +40,7 @@ - do: exists: index: test_1 - id: 1 + id: "1" realtime: false refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml index a042888d66d1c..771767abfcb95 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml @@ -5,12 +5,12 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: exists: index: test_1 - id: 1 + id: "1" - is_true: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml index 1708034df8d4d..f9e185a6c2a59 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml @@ -5,40 +5,40 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: indices.refresh: index: test_1 - do: - explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: false, body: { query: { match_all: {}} } } - match: { _index: test_1 } - match: { _id: "1" } - is_false: get._source - do: - explain: { 
index: test_1, id: 1, _source: true, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: true, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - do: - explain: { index: test_1, id: 1, _source: include.field1, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: include.field1, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - do: - explain: { index: test_1, id: 1, _source_includes: include.field1, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: include.field1, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - do: - explain: { index: test_1, id: 1, _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - match: { get._source.include.field2: v2 } - is_false: get._source.count - do: - explain: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - is_false: get._source.count diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml index 61321c05548ce..a78735641226c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml @@ -14,7 +14,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: @@ -24,7 +24,7 @@ - do: explain: index: test - id: 1 + id: "1" q: bar df: field @@ -33,7 +33,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:foo field:xyz - is_true: matched @@ -41,7 +41,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:foo field:xyz default_operator: AND @@ -50,7 +50,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:BA* - is_true: matched @@ -58,7 +58,7 @@ - do: explain: index: test - id: 1 + id: "1" q: number:foo lenient: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_index_filter.yml similarity index 100% rename from rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_filter.yml rename to rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_index_filter.yml diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml new file mode 100644 index 0000000000000..cfed4f68ea5e7 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml @@ -0,0 +1,213 @@ +--- +setup: + - skip: + version: "- 8.1.99" + reason: Field type filters were added in 8.2 + - do: + indices.create: + index: test1 + body: + mappings: + properties: + text: + type: text + fields: + keyword: + type: 
keyword + keyword: + type: keyword + number: + type: double + geo: + type: geo_point + misc: + type: text + object: + type: object + properties: + nested1 : + type : text + index: false + nested2: + type: float + doc_values: false + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + runtime: + misc.keyword: + type: keyword + + - do: + indices.create: + index: test2 + body: + mappings: + properties: + text: + type: text + keyword: + type: keyword + number: + type: double + date: + type: date + geo: + type: geo_point + object: + type: object + properties: + nested1 : + type : text + index: true + nested2: + type: float + doc_values: true + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + - do: + indices.create: + index: test3 + body: + mappings: + properties: + text: + type: text + keyword: + type: keyword + number: + type: long + date: + type: date + non_indexed_date: + type: date + index: false + non_indexed_keyword: + type: keyword + index: false + non_indexed_boolean: + type: boolean + index: false + non_indexed_ip: + type: ip + index: false + non_indexed_geo_point: + type: geo_point + index: false + geo: + type: keyword + object: + type: nested + properties: + nested1 : + type : long + index: false + nested2: + type: keyword + doc_values: false +--- +"No filters includes all the following fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + + - is_true: fields.object + - is_true: fields.text + - is_true: fields.text\\.keyword + - is_true: fields._seq_no + - is_true: fields.level1\\.level2\\.leaf1 + - is_true: fields.level1 + +--- +"Exclude parent objects": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-parent' + + - is_true: fields.object\\.nested1 + - is_false: fields.object + +--- +"Exclude metadata fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-metadata' + + - is_false: fields._seq_no + +--- +"Exclude non-metadata fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '+metadata' + + - is_true: fields._seq_no + - is_false: fields.text + +--- +"Exclude nested fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-nested' + + - is_false: fields.level1 + - is_false: fields.level1\\.level2\\.leaf1 + +--- +"Exclude multifields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-multifield' + + - is_false: fields.text\\.keyword + - is_true: fields.misc\\.keyword + +--- +"Field type filters": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + types: 'text,keyword,long' + + - is_false: fields.date + - is_false: fields.non_indexed_boolean + - is_true: fields.non_indexed_keyword + - is_true: fields.misc + +--- +"Field type filters with field name restrictions": + - do: + field_caps: + index: 'test1,test2,test3' + fields: 'non_*,text' + types: 'text,keyword,long' + + - is_false: fields.non_indexed_boolean + - is_true: fields.non_indexed_keyword + - is_false: fields.misc + - is_true: fields.text diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml index 2717a365ff328..5908fa69478fe 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml @@ -5,13 +5,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: get: index: test_1 - id: 1 + id: "1" - match: { _index: test_1 } - match: { _id: '1' } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml index 69383a0b7b2b4..e67a167916eb4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -17,12 +17,12 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar", "count": 1 } - do: get: index: test_1 - id: 1 + id: "1" stored_fields: foo - match: { _index: test_1 } @@ -33,7 +33,7 @@ - do: get: index: test_1 - id: 1 + id: "1" stored_fields: [foo, count] - match: { fields.foo: [bar] } @@ -43,7 +43,7 @@ - do: get: index: test_1 - id: 1 + id: "1" stored_fields: [foo, count, _source] - match: { fields.foo: [bar] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml index 78102c4411ca8..defd738b9c205 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml @@ -20,14 +20,14 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: get: index: test_1 - id: 1 + id: "1" routing: "5" stored_fields: [_routing] @@ -38,5 +38,5 @@ catch: missing get: index: test_1 - id: 1 + id: "1" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml index 9e143de5d1b63..c23119e895a2d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml @@ -5,7 +5,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "body": "foo" } - do: @@ -13,7 +13,7 @@ Accept: application/yaml get: index: test_1 - id: 1 + id: "1" - match: {_index: "test_1"} - match: {_id: "1"} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml index 7f35bcae063df..3b56471134b93 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml @@ -19,20 +19,20 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: catch: missing get: index: test_1 - id: 1 + id: "1" realtime: false - do: get: index: test_1 - id: 1 + id: "1" realtime: true - is_true: found @@ -40,7 +40,7 @@ - do: get: index: test_1 - id: 1 + id: "1" realtime: false refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml index b154f020c5b60..60895d24a7061 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml @@ -15,37 
+15,37 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: - get: { index: test_1, id: 1, _source: false } + get: { index: test_1, id: "1", _source: false } - match: { _index: test_1 } - match: { _id: "1" } - is_false: _source - do: - get: { index: test_1, id: 1, _source: true } + get: { index: test_1, id: "1", _source: true } - match: { _source.include.field1: v1 } - do: - get: { index: test_1, id: 1, _source: include.field1 } + get: { index: test_1, id: "1", _source: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, id: 1, _source_includes: include.field1 } + get: { index: test_1, id: "1", _source_includes: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, id: 1, _source_includes: "include.field1,include.field2" } + get: { index: test_1, id: "1", _source_includes: "include.field1,include.field2" } - match: { _source.include.field1: v1 } - match: { _source.include.field2: v2 } - is_false: _source.count - do: - get: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2" } + get: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2" } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - is_false: _source.count @@ -54,7 +54,7 @@ - do: get: index: test_1 - id: 1 + id: "1" stored_fields: count _source: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml index 1e60246f97941..e2a2413027ec9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml @@ -5,7 +5,7 @@ catch: missing get: index: test_1 - id: 1 + id: "1" --- "Missing document with ignore": @@ -13,5 +13,5 @@ - do: get: index: test_1 - id: 1 + id: "1" ignore: 404 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml index cafe6f86193f3..45d6bfe69cbce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml @@ -6,21 +6,21 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _version: 1} - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _version: 2} - do: get: index: test_1 - id: 1 + id: "1" version: 2 - match: { _id: "1" } @@ -28,13 +28,13 @@ catch: conflict get: index: test_1 - id: 1 + id: "1" version: 1 - do: get: index: test_1 - id: 1 + id: "1" version: 2 version_type: external - match: { _id: "1" } @@ -43,7 +43,7 @@ catch: conflict get: index: test_1 - id: 1 + id: "1" version: 10 version_type: external @@ -51,14 +51,14 @@ catch: conflict get: index: test_1 - id: 1 + id: "1" version: 1 version_type: external - do: get: index: test_1 - id: 1 + id: "1" version: 2 version_type: external_gte - match: { _id: "1" } @@ -67,7 +67,7 @@ catch: conflict get: index: test_1 - id: 1 + id: "1" version: 10 version_type: external_gte @@ -75,7 +75,7 @@ catch: conflict get: index: test_1 - id: 1 + id: "1" version: 1 version_type: external_gte diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml index 7318602bb66d7..2969f2eb65e85 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml @@ -6,19 +6,19 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: get_source: index: test_1 - id: 1 + id: "1" - match: { '': { foo: bar } } - do: get_source: index: test_1 - id: 1 + id: "1" - match: { '': { foo: bar } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml index 0e53f92ce4eaa..8a1e453acb721 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml @@ -7,12 +7,12 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: get_source: index: test_1 - id: 1 + id: "1" - match: { '': { foo: bar } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml index 74752f1d95f56..6046ac2abbe37 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml @@ -21,14 +21,14 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: get_source: index: test_1 - id: 1 + id: "1" routing: "5" - match: { '': {foo: bar}} @@ -37,4 +37,4 @@ catch: missing get_source: index: test_1 - id: 1 + id: "1" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml index 7891e7f84d92f..8820a8921ceec 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml @@ -18,20 +18,20 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: catch: missing get_source: index: test_1 - id: 1 + id: "1" realtime: false - do: get_source: index: test_1 - id: 1 + id: "1" realtime: true - match: { '': {foo: bar}} @@ -39,7 +39,7 @@ - do: get_source: index: test_1 - id: 1 + id: "1" realtime: false refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml index 6570524630e5a..869b15dd1ab96 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml @@ -7,22 +7,22 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: - get_source: { index: test_1, id: 1, _source_includes: include.field1 } + get_source: { index: test_1, id: "1", _source_includes: include.field1 } - match: { include.field1: v1 } - is_false: include.field2 - do: - get_source: { index: test_1, id: 1, _source_includes: "include.field1,include.field2" } + get_source: { index: 
test_1, id: "1", _source_includes: "include.field1,include.field2" } - match: { include.field1: v1 } - match: { include.field2: v2 } - is_false: count - do: - get_source: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2" } + get_source: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2" } - match: { include.field1: v1 } - is_false: include.field2 - is_false: count diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml index d7d2975790606..661bdadd2b464 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml @@ -8,7 +8,7 @@ catch: missing get_source: index: test_1 - id: 1 + id: "1" --- "Missing document with ignore": @@ -19,5 +19,5 @@ - do: get_source: index: test_1 - id: 1 + id: "1" ignore: 404 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml index 2ec0585b0f7bc..dfbc4e0ea1fe7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml @@ -13,7 +13,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } @@ -24,7 +24,7 @@ setup: catch: missing get_source: index: test_1 - id: 1 + id: "1" --- "Missing document source with ignore": @@ -32,5 +32,5 @@ setup: - do: get_source: index: test_1 - id: 1 + id: "1" ignore: 404 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index bb1b6db2f2d4c..53499ffd8b201 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -8,13 +8,12 @@ _internal.health: {} - is_true: cluster_name - - match: { status: "GREEN" } + - match: { status: "green" } - match: { impacts: [] } - - match: { components.cluster_coordination.status: "GREEN" } - - match: { components.cluster_coordination.indicators.instance_has_master.status: "GREEN" } + - match: { components.cluster_coordination.status: "green" } + - match: { components.cluster_coordination.indicators.instance_has_master.status: "green" } - match: { components.cluster_coordination.indicators.instance_has_master.summary: "Health coordinating instance has a master node." 
} - is_true: components.cluster_coordination.indicators.instance_has_master.details.coordinating_node.node_id - is_true: components.cluster_coordination.indicators.instance_has_master.details.coordinating_node.name - is_true: components.cluster_coordination.indicators.instance_has_master.details.master_node.node_id - is_true: components.cluster_coordination.indicators.instance_has_master.details.master_node.name - - match: { components.snapshots.status: "GREEN" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml index 06d9eda9b2732..e48bd63079f17 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml @@ -6,7 +6,7 @@ - do: index: index: test-weird-index-中文 - id: 1 + id: "1" body: { foo: bar } - match: { _index: test-weird-index-中文 } @@ -16,7 +16,7 @@ - do: get: index: test-weird-index-中文 - id: 1 + id: "1" - match: { _index: test-weird-index-中文 } - match: { _id: "1"} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml index 626ff1f443d64..cce77e794b4ce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml @@ -5,7 +5,7 @@ - do: index: index: test_index - id: 1 + id: "1" body: { foo: bar } - match: { result: created } @@ -13,7 +13,7 @@ - do: index: index: test_index - id: 1 + id: "1" body: { foo: bar } op_type: index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml index aea2af3860365..4c2865458a7ed 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" op_type: create body: { foo: bar } @@ -14,14 +14,14 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" op_type: create body: { foo: bar } - do: index: index: test_1 - id: 1 + id: "1" op_type: index body: { foo: bar } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml index 27534131782a5..41f1dfd369b51 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml @@ -3,7 +3,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _version: 1} - set: { _seq_no: seqno } @@ -12,7 +12,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _seq_no: $seqno } - match: { _primary_term: $primary_term } @@ -20,7 +20,7 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" if_seq_no: 10000 if_primary_term: $primary_term body: { foo: bar2 } @@ -29,7 +29,7 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" if_seq_no: $seqno if_primary_term: 1000 body: { foo: bar2 } @@ -37,7 +37,7 @@ - do: index: index: test_1 - id: 1 + id: "1" if_seq_no: $seqno if_primary_term: $primary_term body: { foo: bar2 } diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml index 857c9d3c39c92..3f00bd449dd51 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 0 @@ -16,7 +16,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 5 @@ -27,7 +27,7 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 5 @@ -36,7 +36,7 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 0 @@ -44,7 +44,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external version: 6 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml index 30a8cf453a7d1..eed80fea3b884 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external_gte version: 0 @@ -16,7 +16,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external_gte version: 5 @@ -27,7 +27,7 @@ catch: conflict index: index: test_1 - id: 1 + id: "1" body: { foo: bar } version_type: external_gte version: 0 @@ -35,7 +35,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar2 } version_type: external_gte version: 5 @@ -45,7 +45,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar2 } version_type: external_gte version: 6 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml index dcf2224e5807d..3229701365a57 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml @@ -19,14 +19,14 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: get: index: test_1 - id: 1 + id: "1" routing: "5" stored_fields: [_routing] @@ -37,5 +37,5 @@ catch: missing get: index: test_1 - id: 1 + id: "1" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml index 8a76930ac266c..290e7910dd6de 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml @@ -14,7 +14,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -22,14 +22,14 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 0 } - do: index: index: test_1 - id: 2 + id: "2" refresh: true body: { foo: bar } - is_true: forced_refresh @@ -39,7 +39,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 2 }} + 
query: { term: { _id: "2" }} - match: { hits.total: 1 } @@ -51,7 +51,7 @@ - do: index: index: test_1 - id: 1 + id: "1" refresh: "" body: { foo: bar } - is_true: forced_refresh @@ -61,7 +61,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml index 89b8236225c0a..6df2b6acf4e55 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml @@ -18,7 +18,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "message": "a long message to make a periodic flush happen after this index operation" } - do: indices.stats: { index: test } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml index a00282e586f49..db23c4fad375f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -18,13 +18,13 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "foo": "bar" } - do: index: index: test2 - id: 1 + id: "1" body: { "foo": "baz" } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml index a8e8a3d0b0db5..ba8c61a4aa607 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml @@ -4,13 +4,13 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "foo": "bar" } - do: index: index: test2 - id: 1 + id: "1" body: { "foo": "baz" } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml index e9bd219a3e0ab..268c5d40a012b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml @@ -4,13 +4,13 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "foo": "bar" } - do: index: index: test2 - id: 1 + id: "1" body: { "foo": "baz" } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml index 42a11e467ccb3..f21103f8c70bf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml @@ -34,13 +34,13 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "bar": "bar", "baz": "baz" } - do: index: index: test1 - id: 2 + id: "2" body: { "bar": "foo", "baz": "foo" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml index daf55b38919b2..8905bcee2465a 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml @@ -4,7 +4,7 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "bar": "bar", "baz": "baz" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml index 92506334a1f7b..49f57ac5df03f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml @@ -25,7 +25,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "foo": "bar" } - do: @@ -77,7 +77,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "foo": "bar" } - do: @@ -101,7 +101,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "foo": "bar" } - do: indices.stats: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml index 1a105bc00e51b..72fab59cc6cd7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml @@ -27,7 +27,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "foo": "bar" } - do: @@ -73,7 +73,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "foo": "bar" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml index 73c58211c189e..0ee1bb78a95b4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml @@ -29,13 +29,13 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: { "bar": "bar" } - do: index: index: test1 - id: 2 + id: "2" body: { "bar": "foo" } - do: @@ -54,7 +54,7 @@ setup: - do: index: index: test1 - id: 3 + id: "3" body: { "bar": "foo", "baz": "foo" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml index dc14cac06227f..ea5b43435ad2c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml @@ -8,7 +8,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -18,9 +18,9 @@ mget: body: docs: - - { _index: test_2, _id: 1} - - { _index: test_1, _id: 2} - - { _index: test_1, _id: 1} + - { _index: test_2, _id: "1"} + - { _index: test_1, _id: "2"} + - { _index: test_1, _id: "1"} - is_false: docs.0.found - match: { docs.0._index: test_2 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml index 49d75fbc739e2..5ac47ba38a466 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml @@ -5,14 +5,14 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: mget: body: docs: - - { _index: test_2, _id: 1} + - { _index: test_2, _id: "1"} - is_false: docs.0.found - match: { docs.0._index: test_2 } @@ -22,7 +22,7 @@ mget: body: docs: - - { _index: test_1, _id: 1} + - { _index: test_1, _id: "1"} - is_true: docs.0.found - match: { docs.0._index: test_1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml index 7ef1f11df1cae..dc6b5c639c2d2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml @@ -5,7 +5,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -20,7 +20,7 @@ mget: body: docs: - - { _id: 1 } + - { _id: "1" } - do: catch: /action_request_validation_exception.+ no documents to get/ @@ -37,7 +37,7 @@ mget: body: docs: - - { _index: test_1, _id: 1} + - { _index: test_1, _id: "1"} - is_true: docs.0.found - match: { docs.0._index: test_1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml index 825dc256d786a..8e0bd87905e0a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml @@ -29,8 +29,8 @@ mget: body: docs: - - { _index: test_1, _id: 1} - - { _index: test_two_and_three, _id: 2} + - { _index: test_1, _id: "1"} + - { _index: test_two_and_three, _id: "2"} - is_true: docs.0.found - match: { docs.0._index: test_1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml index cf8a79223dfba..e8e1bc3c64e19 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml @@ -9,13 +9,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: baz } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml index 8ff660cf4ce33..4af1a9a9c4c51 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml @@ -8,7 +8,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -16,9 +16,9 @@ index: test_1 body: docs: - - { _index: test_2, _id: 1} - - { _id: 2} - - { _id: 1} + - { _index: test_2, _id: "1"} + - { _id: "2"} + - { _id: "1"} - is_false: docs.0.found - match: { docs.0._index: test_2 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml index 01548972f9604..b93ed5b731755 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml @@ -18,7 +18,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: @@ -26,10 +26,10 @@ index: test_1 body: docs: - - { _id: 1 } - - { _id: 1, stored_fields: foo } - - { _id: 1, stored_fields: [foo] } - - { _id: 1, stored_fields: [foo, _source] } + - { _id: "1" } + - { _id: "1", stored_fields: foo } + - { _id: "1", stored_fields: [foo] } + - { _id: "1", stored_fields: [foo, _source] } - is_false: docs.0.fields - match: { docs.0._source: { foo: bar }} @@ -49,10 +49,10 @@ stored_fields: foo body: docs: - - { _id: 1 } - - { _id: 1, stored_fields: foo } - - { _id: 1, stored_fields: [foo] } - - { _id: 1, stored_fields: [foo, _source] } + - { _id: "1" } + - { _id: "1", stored_fields: foo } + - { _id: "1", stored_fields: [foo] } + - { _id: "1", stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - is_false: docs.0._source @@ -72,10 +72,10 @@ stored_fields: [foo] body: docs: - - { _id: 1 } - - { _id: 1, stored_fields: foo } - - { _id: 1, stored_fields: [foo] } - - { _id: 1, stored_fields: [foo, _source] } + - { _id: "1" } + - { _id: "1", stored_fields: foo } + - { _id: "1", stored_fields: [foo] } + - { _id: "1", stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - is_false: docs.0._source @@ -95,10 +95,10 @@ stored_fields: [foo, _source] body: docs: - - { _id: 1 } - - { _id: 1, stored_fields: foo } - - { _id: 1, stored_fields: [foo] } - - { _id: 1, stored_fields: [foo, _source] } + - { _id: "1" } + - { _id: "1", stored_fields: foo } + - { _id: "1", stored_fields: [foo] } + - { _id: "1", stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - match: { docs.0._source: { foo: bar }} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml index 45f107d0ef1df..7169c0ec25001 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml @@ -17,7 +17,7 @@ routing: - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } @@ -27,9 +27,9 @@ routing: stored_fields: [_routing] body: docs: - - { _id: 1 } - - { _id: 1, routing: "4" } - - { _id: 1, routing: "5" } + - { _id: "1" } + - { _id: "1", routing: "4" } + - { _id: "1", routing: "5" } - is_false: docs.0.found - is_false: docs.1.found @@ -59,7 +59,7 @@ requires routing: - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } @@ -73,9 +73,9 @@ requires routing: stored_fields: [_routing] body: docs: - - { _id: 1, _index: test_1 } - - { _id: 1, _index: alias } - - { _id: 1, _index: test_1, routing: "5" } + - { _id: "1", _index: test_1 } + - { _id: "1", _index: alias } + - { _id: "1", _index: test_1, routing: "5" } - is_false: docs.0.found - match: { docs.0.error.reason: "routing is required for [test_1]/[1]" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml index 8ae390943c6b5..2f2036217d8dc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml @@ -18,7 +18,7 
@@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml index a70151fd2e756..b9c720436a62a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml @@ -4,12 +4,12 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: index: index: test_1 - id: 2 + id: "2" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml index b6b7a84100dd7..f57d9500375f1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml @@ -7,13 +7,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: baz } - do: @@ -21,13 +21,13 @@ mget: body: docs: - - { _index: test_1, _id: 1, _routing : test1 } - - { _index: test_1, _id: 2, _routing : test1 } + - { _index: test_1, _id: "1", _routing : test1 } + - { _index: test_1, _id: "2", _routing : test1 } - do: catch: bad_request mget: body: docs: - - { _index: test_1, _id: 1, _version : 1 } - - { _index: test_1, _id: 2, _version : 1 } + - { _index: test_1, _id: "1", _version : 1 } + - { _index: test_1, _id: "2", _version : 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml index 243d953811336..47679b7f1058a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml @@ -17,7 +17,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar, title: howdy } - do: @@ -36,7 +36,7 @@ more_like_this: like: - - _id: 1 + _id: "1" fields: ["title"] - match: {hits.total: 0} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml index 50eb344d99048..188e817ce7592 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml @@ -9,19 +9,19 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: baz } - do: index: index: test_1 - id: 3 + id: "3" body: { foo: foo } - do: @@ -45,9 +45,9 @@ foo: bar - _index: test_1 - _id: 2 + _id: "2" - - _id: 3 + _id: "3" include: true min_doc_freq: 0 min_term_freq: 0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml index a0f96eb6b2d1f..c913268d807dd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml @@ -9,19 +9,19 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar baz selected } - 
do: index: index: test_1 - id: 2 + id: "2" body: { foo: bar } - do: index: index: test_1 - id: 3 + id: "3" body: { foo: bar baz } - do: @@ -40,10 +40,10 @@ more_like_this: like: _index: test_1 - _id: 1 + _id: "1" unlike: _index: test_1 - _id: 3 + _id: "3" include: true min_doc_freq: 0 min_term_freq: 0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml index 9f6f2e70ae46d..1052508ca2b88 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml @@ -4,25 +4,25 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: index_1 - id: 2 + id: "2" body: { foo: baz } - do: index: index: index_1 - id: 3 + id: "3" body: { foo: foo } - do: index: index: index_2 - id: 1 + id: "1" body: { foo: foo } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml index 36374cfa2daac..a9e1893ea764c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml @@ -16,7 +16,7 @@ routing: - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar baz } @@ -26,9 +26,9 @@ routing: fields: foo body: docs: - - { _id: 1 } - - { _id: 1, routing: "4" } - - { _id: 1, routing: "5" } + - { _id: "1" } + - { _id: "1", routing: "4" } + - { _id: "1", routing: "5" } - is_false: docs.0.found - is_false: docs.1.found @@ -60,7 +60,7 @@ requires routing: - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar baz } @@ -74,9 +74,9 @@ requires routing: fields: foo body: docs: - - { _id: 1, _index: test_1 } - - { _id: 1, _index: alias } - - { _id: 1, _index: test_1, routing: "5" } + - { _id: "1", _index: test_1 } + - { _id: "1", _index: alias } + - { _id: "1", _index: test_1, routing: "5" } - is_false: docs.0.found - match: { docs.0.error.reason: "routing is required for [test_1]/[1]" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml index 20dd6fc614694..6dc62b24a39df 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml @@ -26,25 +26,25 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "integer_range" : { "gte": 1, "lte": 5 } } - do: index: index: test - id: 2 + id: "2" body: { "integer_range" : { "gte": 1, "lte": 3 } } - do: index: index: test - id: 3 + id: "3" body: { "integer_range" : { "gte": 4, "lte": 5 } } - do: index: index: test - id: 4 + id: "4" body: { "integer_range" : null } - do: @@ -98,19 +98,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "long_range" : { "gte": 1, "lte": 5 } } - do: index: index: test - id: 2 + id: "2" body: { "long_range" : { "gte": 1, "lte": 3 } } - do: index: index: test - id: 3 + id: "3" body: { "long_range" : { "gte": 4, "lte": 5 } } @@ -158,19 +158,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "float_range" : { "gte": 1, "lte": 5 } } - do: index: index: test - id: 2 + id: "2" body: { "float_range" : { "gte": 1, "lte": 3 } } - 
do: index: index: test - id: 3 + id: "3" body: { "float_range" : { "gte": 4, "lte": 5 } } @@ -218,19 +218,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "double_range" : { "gte": 1, "lte": 5 } } - do: index: index: test - id: 2 + id: "2" body: { "double_range" : { "gte": 1, "lte": 3 } } - do: index: index: test - id: 3 + id: "3" body: { "double_range" : { "gte": 4, "lte": 5 } } @@ -278,19 +278,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "ip_range" : { "gte": "192.168.0.1", "lte": "192.168.0.5" } } - do: index: index: test - id: 2 + id: "2" body: { "ip_range" : { "gte": "192.168.0.1", "lte": "192.168.0.3" } } - do: index: index: test - id: 3 + id: "3" body: { "ip_range" : { "gte": "192.168.0.4", "lte": "192.168.0.5" } } @@ -338,19 +338,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-05" } } - do: index: index: test - id: 2 + id: "2" body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-03" } } - do: index: index: test - id: 3 + id: "3" body: { "date_range" : { "gte": "2017-09-04", "lte": "2017-09-05" } } @@ -401,19 +401,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date_range" : { "gte": "2019-12-14T12:00:00.000Z", "lte": "2019-12-14T13:00:00.000Z" } } - do: index: index: test - id: 2 + id: "2" body: { "date_range" : { "gte": "2019-12-15T12:00:00.000Z", "lte": "2019-12-15T13:00:00.000Z" } } - do: index: index: test - id: 3 + id: "3" body: { "date_range" : { "gte": "2019-12-16T12:00:00.000Z", "lte": "2019-12-16T13:00:00.000Z" } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml index 957d1dc20fbb7..d0ab5783d96a6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml @@ -6,13 +6,13 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: test_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -37,7 +37,7 @@ - do: index: index: test_scroll - id: 44 + id: "44" body: { foo: 3 } - do: @@ -78,13 +78,13 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: test_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -109,7 +109,7 @@ - do: index: index: test_scroll - id: 44 + id: "44" body: { foo: 3 } - do: @@ -145,13 +145,13 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: test_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -176,7 +176,7 @@ - do: index: index: test_scroll - id: 44 + id: "44" body: { foo: 3 } - do: @@ -240,13 +240,13 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: test_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -285,13 +285,13 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: test_scroll - id: 43 + id: "43" body: { foo: 2 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml index 97a13dd0c2c5f..dbc3cdc62171f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml @@ -6,7 +6,7 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: 
bar } - do: @@ -46,7 +46,7 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: bar } - do: @@ -87,7 +87,7 @@ - do: index: index: test_scroll - id: 42 + id: "42" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml index e7b1086499b5a..6cc590a36d6c1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml @@ -11,25 +11,25 @@ setup: - do: index: index: test_sliced_scroll - id: 1 + id: "1" body: { foo: 1 } - do: index: index: test_sliced_scroll - id: 2 + id: "2" body: { foo: 2 } - do: index: index: test_sliced_scroll - id: 3 + id: "3" body: { foo: 3 } - do: index: index: test_sliced_scroll - id: 4 + id: "4" body: { foo: 4 } - do: @@ -45,7 +45,7 @@ setup: sort: foo body: slice: - id: 0 + id: "0" max: 2 query: match_all: {} @@ -78,7 +78,7 @@ setup: sort: foo body: slice: - id: 1 + id: "1" max: 2 query: match_all: {} @@ -113,7 +113,7 @@ setup: scroll: 1m body: slice: - id: 0 + id: "0" max: 1025 query: match_all: {} @@ -132,7 +132,7 @@ setup: scroll: 1m body: slice: - id: 0 + id: "0" max: 1025 query: match_all: {} @@ -154,7 +154,7 @@ setup: body: slice: field: foo - id: 0 + id: "0" max: 2 query: match_all: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml index 40c91128d1c76..16d934041c5c0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml @@ -14,13 +14,13 @@ - do: index: index: test_scroll - id: 1 + id: "1" body: { foo: 1 } - do: index: index: test_scroll - id: 2 + id: "2" body: { foo: 1 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml index 0e57bb9abd667..6656fced57cd0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml index 4235679746115..466dc0cf9461b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - 
index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml index eb68357258507..30c226f554c3d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml index 3221543276115..218168ac4cb80 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml index b5ac7d2e5db01..2c46a2035b386 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml index 2afad21e61421..74440e039eb6e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 
51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml index c70ca3356767a..b5a6046b4d7a1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml index d7ca13eef4008..f35c85034f0b8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml index 9ed414f6b8439..9a126730c63c6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml index 32c349c5e46b6..21f3ad31558af 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml @@ -22,25 +22,25 @@ setup: body: 
- index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo @@ -422,7 +422,7 @@ setup: - do: index: index: test_1 - id: 5 + id: "5" refresh: true body: { int_field: -10 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml index f7d8619a48de1..5b05382eaa292 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml @@ -14,7 +14,7 @@ setup: - do: index: index: my-index - id: 1 + id: "1" refresh: true body: | { @@ -34,7 +34,7 @@ setup: - do: index: index: my-index - id: 2 + id: "2" refresh: true body: | { @@ -100,7 +100,7 @@ setup: - do: index: index: disabled-source - id: 1 + id: "1" refresh: true body: users: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml index eb871da38db0b..119c5c8234441 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml @@ -43,19 +43,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: @@ -87,19 +87,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "ip": "::1" } - do: index: index: test_1 - id: 2 + id: "2" body: { "ip": "127.0.0.1" } - do: index: index: test_1 - id: 3 + id: "3" body: { "ip": "::1" } - do: @@ -162,19 +162,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "boolean": true } - do: index: index: test_1 - id: 2 + id: "2" body: { "boolean": false } - do: index: index: test_1 - id: 3 + id: "3" body: { "boolean": true } - do: @@ -206,19 +206,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "integer": 1234 } - do: index: index: test_1 - id: 2 + id: "2" body: { "integer": 5678 } - do: index: index: test_1 - id: 3 + id: "3" body: { "integer": 1234 } - do: @@ -250,19 +250,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "double": 1234.5 } - do: index: index: test_1 - id: 2 + id: "2" body: { "double": 5678.5 } - do: index: index: test_1 - id: 3 + id: "3" body: { "double": 1234.5 } - do: @@ -294,19 +294,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "date": "2016-05-03" } - do: index: index: test_1 - id: 2 + id: "2" body: { "date": "2014-09-01" } - do: index: index: test_1 - id: 3 + id: "3" body: { "date": "2016-05-03" } - do: @@ -365,19 +365,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: @@ -419,19 +419,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "integer": 1234 } - do: 
index: index: test_1 - id: 2 + id: "2" body: { "integer": 5678 } - do: index: index: test_1 - id: 3 + id: "3" body: { "integer": 1234 } - do: @@ -469,7 +469,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -494,7 +494,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -521,7 +521,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -548,7 +548,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -573,7 +573,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -598,31 +598,31 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {"number": 100} - do: index: index: test_1 - id: 2 + id: "2" body: {"number": 10} - do: index: index: test_2 - id: 3 + id: "3" body: {"number": 100.0} - do: index: index: test_2 - id: 1 + id: "1" body: {"number": 10.0} - do: index: index: test_2 - id: 2 + id: "2" body: {"number": 14.6} - do: @@ -656,7 +656,7 @@ setup: index: refresh: true index: test_1 - id: 1 + id: "1" routing: "1" body: { "str": "abc" } @@ -664,7 +664,7 @@ setup: index: refresh: true index: test_1 - id: 2 + id: "2" routing: "1" body: { "str": "abc" } @@ -672,7 +672,7 @@ setup: index: refresh: true index: test_1 - id: 3 + id: "3" routing: "1" body: { "str": "bcd" } @@ -702,7 +702,7 @@ setup: index: refresh: true index: test_1 - id: 1 + id: "1" routing: "1" body: { "str": "abc" } @@ -710,7 +710,7 @@ setup: index: refresh: true index: test_1 - id: 2 + id: "2" routing: "1" body: { "str": "abc" } @@ -718,7 +718,7 @@ setup: index: refresh: true index: test_1 - id: 3 + id: "3" routing: "1" body: { "str": "bcd" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml index 2d691ba3e73e9..b8f90bc836691 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml @@ -20,25 +20,25 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 string_field: foo - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 51 double_field: 51.0 string_field: foo - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 101 double_field: 101.0 string_field: foo - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 151 double_field: 151.0 string_field: foo @@ -287,7 +287,7 @@ setup: body: - index: _index: test_1 - _id: 100 + _id: "100" - int_field: 1 double_field: 1.0 string_field: foo bar diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml index f405c985f9ee4..cf38f2a6dcfe5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -65,25 +65,25 @@ setup: - do: index: index: nonesting - id: 1 + id: "1" body: { "kw": "one", "num": 1 } - do: index: index: nonesting - id: 2 + id: "2" body: { "kw": "two", "num": 2 } - do: index: index: nonesting - id: 3 + id: "3" body: { "kw": "three", "num": 3 } - do: index: index: verynested - id: 1 + id: "1" body: { "department": "compsci", "staff": 12, @@ -122,7 +122,7 @@ setup: - do: index: index: 
verynested - id: 2 + id: "2" body: { "department": "math", "staff": 20, @@ -161,43 +161,43 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "keyword": "foo", "long": [10, 20], "geo_point": "37.2343,-115.8067"} - do: index: index: test - id: 2 + id: "2" body: { "keyword": ["foo", "bar"], "geo_point": "41.12,-71.34" } - do: index: index: test - id: 3 + id: "3" body: { "keyword": "bar", "long": [100, 0], "geo_point": "90.0,0.0"} - do: index: index: test - id: 4 + id: "4" body: { "keyword": "bar", "long": [1000, 0], "geo_point": "41.12,-71.34"} - do: index: index: test - id: 5 + id: "5" body: { "date": "2017-10-20T03:08:45" } - do: index: index: test - id: 6 + id: "6" body: { "date": "2017-10-21T07:00:00" } - do: index: index: other - id: 0 + id: "0" body: { "date": "2017-10-20T03:08:45" } - do: @@ -808,7 +808,7 @@ setup: - do: index: index: test_2 - id: 1 + id: "1" body: { "f": "192.168.0.1" } refresh: true @@ -841,7 +841,7 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date": "2017-10-22T01:00:00" } refresh: true - do: @@ -911,7 +911,7 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date": "2017-10-22T01:00:00" } refresh: true - do: @@ -979,19 +979,19 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date_nanos": "2017-11-21T01:00:00" } refresh: true - do: index: index: test - id: 8 + id: "8" body: { "date_nanos": "2017-11-22T01:00:00" } refresh: true - do: index: index: test - id: 9 + id: "9" body: { "date_nanos": "2017-11-22T02:00:00" } refresh: true - do: @@ -1041,7 +1041,7 @@ setup: - do: index: index: sorted_test - id: 2 + id: "2" refresh: true body: { "keyword": "foo", "long": 1 } @@ -1085,7 +1085,7 @@ setup: - do: index: index: sorted_test - id: 2 + id: "2" refresh: true body: { "keyword": "foo", "long": 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index 7f94aeac64de3..bc52f78bbcccc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -14,49 +14,49 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date": "2014-03-03T00:00:00", "keyword": "dgx" } - do: index: index: test - id: 2 + id: "2" body: { "date": "2015-03-03T00:00:00", "keyword": "dfs" } - do: index: index: test - id: 3 + id: "3" body: { "date": "2016-03-03T00:00:00", "keyword": "foobar" } - do: index: index: test - id: 4 + id: "4" body: { "date": "2017-03-03T00:00:00", "keyword": "foo" } - do: index: index: test - id: 5 + id: "5" body: { "date": "2018-03-03T00:00:00", "keyword": "bar" } - do: index: index: test - id: 6 + id: "6" body: { "date": "2019-03-03T00:00:00", "keyword": "baz" } - do: index: index: test - id: 7 + id: "7" body: { "date": "2020-03-03T00:00:00", "keyword": "qux" } - do: index: index: test - id: 8 + id: "8" body: { "date": "2021-03-03T00:00:00", "keyword": "quux" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml index 6b17132c751de..019dffa81960e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml @@ -20,22 +20,22 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 2 double_field: 2.0 - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 3 double_field: 3.0 - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 4 double_field: 4.0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml index 1368c87a77d7e..3257db80c7d49 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml @@ -18,27 +18,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml index 9048914174133..a2e74fdbd58b7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml @@ -33,19 +33,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: @@ -66,19 +66,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "ip": "::1" } - do: index: index: test_1 - id: 2 + id: "2" body: { "ip": "127.0.0.1" } - do: index: index: test_1 - id: 3 + id: "3" body: { "ip": "::1" } - do: @@ -124,19 +124,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "boolean": true } - do: index: index: test_1 - id: 2 + id: "2" body: { "boolean": false } - do: index: index: test_1 - id: 3 + id: "3" body: { "boolean": true } - do: @@ -157,19 +157,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "integer": 1234 } - do: index: index: test_1 - id: 2 + id: "2" body: { "integer": 5678 } - do: index: index: test_1 - id: 3 + id: "3" body: { "integer": 1234 } - do: @@ -192,19 +192,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "date": "2016-05-03" } - do: index: index: test_1 - id: 2 + id: "2" body: { "date": "2014-09-01" } - do: index: index: test_1 - id: 3 + id: "3" body: { "date": "2016-05-03" } - do: @@ -243,7 +243,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -262,7 +262,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -281,7 +281,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -300,7 +300,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -323,21 +323,21 @@ setup: index: refresh: true index: test_1 - id: 1 + id: "1" body: { 
"str" : "abc", "number": 1 } - do: index: refresh: true index: test_1 - id: 2 + id: "2" body: { "str": "abc", "number": 2 } - do: index: refresh: true index: test_1 - id: 3 + id: "3" body: { "str": "bcd", "number": 3 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml index 35a2330819034..e4054979656c7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml @@ -19,27 +19,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml index a5c575ab5acee..d826a6a7debaa 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml @@ -20,19 +20,19 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 2 - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 3 - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 4 --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml index 62bfffb87dadf..147048c8dce93 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml @@ -17,37 +17,37 @@ - do: index: index: goodbad - id: 1 + id: "1" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 2 + id: "2" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 3 + id: "3" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 4 + id: "4" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 5 + id: "5" body: { text: "good bad", class: "good" } - do: index: index: goodbad - id: 6 + id: "6" body: { text: "good bad", class: "bad" } - do: index: index: goodbad - id: 7 + id: "7" body: { text: "bad", class: "bad" } @@ -108,33 +108,33 @@ - do: index: index: goodbad-2 - id: 1 + id: "1" body: { class: "bad" } - do: index: index: goodbad-2 - id: 2 + id: "2" body: { class: "bad" } - do: index: index: goodbad - id: 1 + id: "1" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 2 + id: "2" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 3 + id: "3" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 4 + id: "4" body: { text: "bad", class: "bad" } - do: @@ -167,12 +167,12 @@ - do: index: index: ip_index - id: 1 + id: "1" 
body: { ip: "::1" } - do: index: index: ip_index - id: 2 + id: "2" body: { } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml index 4d0ae1e56df38..b7e16109826b4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml @@ -16,7 +16,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date": "2009-11-15T14:12:12" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml index 43623846c87c3..7502f73fa6e2d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml @@ -26,13 +26,13 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 37 }, { "semester": "fall2020", "students": 45} ] }, { "name": "Theory of Computation", "credits": 4, "sessions": [ { "semester": "spr2021", "students": 19 }, { "semester": "fall2020", "students": 14 } ] } ] } - do: index: index: test - id: 2 + id: "2" body: { "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1, "sessions": [ { "semester": "spr2021", "students": 100 }, { "semester": "fall2020", "students": 134 } ] }, { "name": "Linear Algebra", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 29 }, { "semester": "fall2020", "students": 23 } ] } ] } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml index fe80dc411f308..bfd1763aaba8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml @@ -21,27 +21,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml index 8039ebc68f0fa..88760b99714a6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml @@ -142,8 +142,8 @@ setup: --- "Float range": - skip: - version: " - 7.16.99" - reason: Bug fixed in 8.1.0 and 
backported to 7.17.0 + version: " - 7.17.0" + reason: Bug fixed in 8.1.0 and backported to 7.17.1 - do: search: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml index 0c1d09b2e770f..33c6c5d78f897 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml @@ -22,31 +22,31 @@ setup: refresh: true body: - { "index": { } } - - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 10, ip: "192.168.1.10" } + - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 10, "ip": "192.168.1.10" } - { "index": { } } - - { "ipv4": "192.168.1.12", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f12", "value": 20, ip: "2001:db8:a4f8:112a:6001:0:12:7f12" } + - { "ipv4": "192.168.1.12", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f12", "value": 20, "ip": "2001:db8:a4f8:112a:6001:0:12:7f12" } - { "index": { } } - - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 40, ip: "192.168.1.33" } + - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 40, "ip": "192.168.1.33" } - { "index": { } } - - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 20, ip: "2001:db8:a4f8:112a:6001:0:12:7f10" } + - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 20, "ip": "2001:db8:a4f8:112a:6001:0:12:7f10" } - { "index": { } } - - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 70, ip: "192.168.1.33" } + - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 70, "ip": "192.168.1.33" } - { "index": { } } - - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 20, ip: "2001:db8:a4f8:112c:6001:0:12:7f41" } + - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 20, "ip": "2001:db8:a4f8:112c:6001:0:12:7f41" } - { "index": { } } - - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 30, ip: "192.168.2.10" } + - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 30, "ip": "192.168.2.10" } - { "index": { } } - - { "ipv4": "192.168.2.23", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f23", "value": 50, ip: "2001:db8:a4f8:112c:6001:0:12:7f23" } + - { "ipv4": "192.168.2.23", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f23", "value": 50, "ip": "2001:db8:a4f8:112c:6001:0:12:7f23" } - { "index": { } } - - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 60, ip: "192.168.2.41" } + - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 60, "ip": "192.168.2.41" } - { "index": { } } - - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 10, ip: "2001:db8:a4f8:112c:6001:0:12:7f10" } + - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 10, "ip": "2001:db8:a4f8:112c:6001:0:12:7f10" } --- "IPv4 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -79,8 +79,8 @@ setup: # network part will just 0s.
"IPv4 prefix with incorrect is_ipv6": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -104,8 +104,8 @@ setup: --- "IPv4 short prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -141,8 +141,8 @@ setup: --- "IPv6 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -175,8 +175,8 @@ setup: # with everything else being 0s. "IPv6 prefix with incorrect is_ipv6": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -201,8 +201,8 @@ setup: --- "Invalid IPv4 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: catch: /\[prefix_length\] must be in range \[0, 32\] while value is \[44\]/ search: @@ -219,8 +219,8 @@ setup: --- "Invalid IPv6 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: catch: /\[prefix_length] must be in range \[0, 128\] while value is \[170]/ search: @@ -236,8 +236,8 @@ setup: --- "IPv4 prefix sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -278,8 +278,8 @@ setup: --- "IPv6 prefix sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -319,8 +319,8 @@ setup: --- "IPv6 prefix metric sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -356,8 +356,8 @@ setup: --- "IPv4 prefix appended": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -388,8 +388,8 @@ setup: --- "IPv6 prefix appended": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -420,8 +420,8 @@ setup: --- "Mixed IPv4 and IPv6 with is_ipv6 false": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: @@ -451,8 +451,8 @@ setup: --- "Mixed IPv4 and IPv6 with is_ipv6 true": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping while preparing to backport to 8.1" - do: search: body: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml index 2699ca14dd4c6..ff2cf8fe8112f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml @@ -32,8 +32,8 @@ setup: "Test
random_sampler aggregation with no filter": - skip: features: close_to - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: search: index: data @@ -60,8 +60,8 @@ setup: --- "Test random_sampler aggregation with filter": - skip: - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: search: index: data @@ -124,8 +124,8 @@ setup: --- "Test random_sampler aggregation with poor settings": - skip: - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: catch: /\[probability\] must be between 0 and 1/ search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml index 481c32f688be6..dba480e31985a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml @@ -18,7 +18,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "mydate": "2021-08-12T01:00:00.000000000+02:00" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 54d176f7675ed..3916386abc244 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "text" : "The quick brown fox is brown." - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml index a7f4ea6248c9f..adb361098c942 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml @@ -47,7 +47,7 @@ setup: body: query: term: - id: 1 + id: "1" highlight: type: fvh fields: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml index 828293be114e6..edc69b4e22a35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml @@ -18,7 +18,7 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: "field1" : "The quick brown fox went to the forest and saw another fox." "field2" : "The quick brown fox went to the forest and saw another fox." 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml index cd4e7909cc92f..ee81e9b90341d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml @@ -15,7 +15,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "bar" } ] @@ -42,7 +42,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "bar" } ] - do: @@ -64,7 +64,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "baz" } ] - do: @@ -102,7 +102,7 @@ setup: - do: index: index: disabled_source - id: 1 + id: "1" body: nested_field: field: value diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml index 9f12c31b04c75..17f328046833e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml @@ -28,7 +28,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: nested: @@ -108,7 +108,7 @@ setup: - do: index: index: disabled_source - id: 1 + id: "1" refresh: true body: nested: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml index d2933a44e586d..3f8697e899400 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml @@ -5,7 +5,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { foo: bar } - do: indices.refresh: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml index 1d740f192447d..86a87adc35e89 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -13,7 +13,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936, d: 3.14 } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml index d476426312147..a2d19a16ab85f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -11,42 +11,42 @@ setup: - do: index: index: test - id: 1 + id: "1" version_type: external version: 11 body: { numeric_group: 1, tag: A, sort: 10 } - do: index: index: test - id: 2 + id: "2" version_type: external version: 22 body: { numeric_group: 1, tag: B, sort: 6 } - do: index: index: test - id: 3 + id: "3" version_type: external version: 33 body: { numeric_group: 1, tag: A, 
sort: 24 } - do: index: index: test - id: 4 + id: "4" version_type: external version: 44 body: { numeric_group: 25, tag: B, sort: 10 } - do: index: index: test - id: 5 + id: "5" version_type: external version: 55 body: { numeric_group: 25, tag: A, sort: 5 } - do: index: index: test - id: 6 + id: "6" version_type: external version: 66 body: { numeric_group: 3, tag: B, sort: 36 } @@ -438,12 +438,12 @@ setup: - do: index: index: alias-test - id: 1 + id: "1" body: { other_numeric_group: 1, sort: 6 } - do: index: index: alias-test - id: 2 + id: "2" body: { other_numeric_group: 25, sort: 10 } - do: indices.refresh: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml index 9c23899fc12dc..2177668c81ced 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml @@ -26,19 +26,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index c6509546ca94b..ac8194cdff7dd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -60,18 +60,18 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { "created_at": "2016-01-01"} - do: index: index: index_2 - id: 2 + id: "2" body: { "created_at": "2017-01-01" } - do: index: index: index_3 - id: 3 + id: "3" body: { "created_at": "2018-01-01" } - do: indices.refresh: {} @@ -244,18 +244,18 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { "created_at_not_indexed": "2016-01-01"} - do: index: index: index_2 - id: 2 + id: "2" body: { "created_at_not_indexed": "2017-01-01" } - do: index: index: index_3 - id: 3 + id: "3" body: { "created_at_not_indexed": "2018-01-01" } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index 84ec1ebee4b7d..7e402f14d6809 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -12,19 +12,19 @@ - do: index: index: search_index - id: 1 + id: "1" body: { "user": "1" } - do: index: index: search_index - id: 2 + id: "2" body: { "user": "2" } - do: index: index: search_index - id: 3 + id: "3" body: { "user": "3" } - do: @@ -49,7 +49,7 @@ - do: index: index: lookup_index - id: 1 + id: "1" body: { "followers" : ["1", "3"] } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml index 0c4ab603420f2..7efdf521d3c3c 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml @@ -51,7 +51,7 @@ setup: Content-Type: application/json index: index: "test" - id: 1 + id: "1" body: binary: "YWJjZGUxMjM0" boolean: true @@ -76,7 +76,7 @@ setup: Content-Type: application/json index: index: "test" - id: 2 + id: "2" body: binary: "YWJjZGUxMjM0" boolean: false @@ -100,7 +100,7 @@ setup: Content-Type: application/json index: index: "test" - id: 3 + id: "3" routing: "route_me" body: binary: "YWJjZGUxMjM0" @@ -123,7 +123,7 @@ setup: - do: index: index: "test" - id: 4 + id: "4" body: {} - do: @@ -190,7 +190,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 1 + id: "1" body: binary: "YWJjZGUxMjM0" boolean: true @@ -215,7 +215,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 2 + id: "2" body: binary: "YWJjZGUxMjM0" boolean: false @@ -239,7 +239,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 3 + id: "3" routing: "route_me" body: binary: "YWJjZGUxMjM0" @@ -262,7 +262,7 @@ setup: - do: index: index: "test-no-dv" - id: 4 + id: "4" body: {} - do: @@ -278,7 +278,7 @@ setup: - do: index: index: "test-unmapped" - id: 1 + id: "1" body: unrelated: "foo" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml index 592147c0c1d93..4dfba3652d7db 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml @@ -54,7 +54,7 @@ setup: Content-Type: application/json index: index: "test" - id: 1 + id: "1" body: nested: - binary: "YWJjZGUxMjM0" @@ -80,7 +80,7 @@ setup: Content-Type: application/json index: index: "test" - id: 2 + id: "2" body: nested: - binary: "YWJjZGUxMjM0" @@ -105,7 +105,7 @@ setup: Content-Type: application/json index: index: "test" - id: 3 + id: "3" routing: "route_me" body: nested: @@ -129,7 +129,7 @@ setup: - do: index: index: "test" - id: 4 + id: "4" body: {} - do: @@ -199,7 +199,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 1 + id: "1" body: nested: - binary: "YWJjZGUxMjM0" @@ -225,7 +225,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 2 + id: "2" body: nested: - binary: "YWJjZGUxMjM0" @@ -250,7 +250,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 3 + id: "3" routing: "route_me" body: nested: @@ -274,7 +274,7 @@ setup: - do: index: index: "test-no-dv" - id: 4 + id: "4" body: {} - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml index f667786f557f9..085a0396d5576 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -14,7 +14,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { text: some short words with a stupendously long one } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml index e48289a53909d..b08f532585fde 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml @@ -18,19 +18,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "my_date": "2018-05-11", "my_ip": ":::1" } - do: index: index: test - id: 2 + id: "2" body: { "my_date": "bar", "my_ip": "192.168.1.42" } - do: index: index: test - id: 3 + id: "3" body: { "my_date": "bar", "my_ip": "quux" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml index d37c0c8cb3f72..1aa776c4709b2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml @@ -14,7 +14,7 @@ - do: index: index: test - id: 1 + id: "1" body: { text: "peter piper picked a peck of pickled peppers" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml index b2ec345a6fe8f..d775c47926fc6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml @@ -8,13 +8,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_2 - id: 42 + id: "42" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml index 965d18844e3cc..0f352de9c414b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml @@ -10,43 +10,43 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 3 + id: "3" body: { foo: baz } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: bar } - do: index: index: test_1 - id: 4 + id: "4" body: { foo: bar } - do: index: index: test_2 - id: 42 + id: "42" body: { foo: bar } - do: index: index: test_2 - id: 24 + id: "24" body: { foo: baz } - do: index: index: test_2 - id: 36 + id: "36" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml index 56871bfe02645..60ce254169155 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml @@ -6,14 +6,14 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: foo } ## we index again in order to make the seq# 1 (so we can check for the field existence with is_false) - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml index b4ff84e13353f..286e203ef5c7d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml @@ -10,7 +10,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar, foo2: bar, foo3: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml index f92b0ffda80e3..e9e39ed15ea56 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: my_field1: "brown fox jump" my_field2: "xylophone" @@ -25,7 +25,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: my_field1: "brown emu jump" my_field2: "xylophone" @@ -33,7 +33,7 @@ setup: - do: index: index: test - id: 3 + id: "3" body: my_field1: "jumparound" my_field2: "emu" @@ -41,7 +41,7 @@ setup: - do: index: index: test - id: 4 + id: "4" body: my_field1: "dog" my_field2: "brown fox jump lazy" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index c18af5d7d4188..14a24d5f20933 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -19,7 +19,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: [ "a", "b" ] integer_range: @@ -63,7 +63,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: "value" date: "1990-12-29T22:30:00.000Z" @@ -111,7 +111,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: [ "a" ] @@ -148,7 +148,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: "a" integer: 42 @@ -156,7 +156,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: keyword: "b" integer: "not an integer" @@ -194,7 +194,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: "a" @@ -236,7 +236,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: date: "1990-12-29T22:30:00.000Z" @@ -282,7 +282,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: count: "some text" @@ -317,7 +317,7 @@ Test unmapped field: - do: index: index: test - id: 1 + id: "1" refresh: true body: f1: some text @@ -378,7 +378,7 @@ Test unmapped fields inside disabled objects: - do: index: index: test - id: 1 + id: "1" refresh: true body: f1: @@ -426,7 +426,7 @@ Test nested fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: products: @@ -505,7 +505,7 @@ Test nested field inside object structure: - do: index: index: test - id: 1 + id: "1" refresh: true body: obj: @@ -516,7 +516,7 @@ Test nested field inside object structure: - do: index: index: test - id: 2 + id: "2" refresh: true body: obj: @@ -619,7 +619,7 @@ Test doubly nested fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: id: abcd1234 @@ -691,7 +691,7 @@ Test nested fields with unmapped subfields: - do: index: index: test - id: 1 + id: "1" refresh: true body: id: abcd1234 @@ -752,7 +752,7 @@ Test nested 
fields with ignored subfields: - do: index: index: test - id: 1 + id: "1" refresh: true body: malformed_outside : "bad_value_1" @@ -796,7 +796,7 @@ Test nested field with sibling field resolving to DocValueFetcher: - do: index: index: test - id: 1 + id: "1" refresh: true body: owner: "Anna Ott" @@ -838,7 +838,7 @@ Test nested field with sibling field resolving to DocValueFetcher: - do: index: index: test - id: 1 + id: "1" refresh: true body: number: [ 1, 2, "3", "four", 5, 6 ] @@ -880,7 +880,7 @@ Test token_count inside nested field doesn't fail: - do: index: index: test - id: 1 + id: "1" refresh: true body: user: @@ -916,7 +916,7 @@ error includes field name: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: "value" @@ -953,7 +953,7 @@ error includes glob pattern: - do: index: index: test - id: 1 + id: "1" refresh: true body: dkeyword: "value" @@ -992,7 +992,7 @@ error for flattened includes whole path: - do: index: index: test - id: 1 + id: "1" refresh: true body: flattened: @@ -1031,7 +1031,7 @@ test fetching metadata fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml index d7a9020e7e9f6..b461a6c4defb2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml @@ -16,7 +16,7 @@ setup: - do: index: index: flattened_test - id: 1 + id: "1" body: flattened: key: some_value @@ -66,7 +66,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: headers: content-type: application/javascript @@ -76,7 +76,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: headers: content-type: text/plain @@ -122,7 +122,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value @@ -164,7 +164,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value @@ -198,7 +198,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml index 6fbdb575fccb5..d58e1f7dc2c18 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml @@ -21,7 +21,7 @@ index: index: test refresh: true - id: 1 + id: "1" body: binary: U29tZSBiaW5hcnkgYmxvYg== diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml index b3ad192710cdf..bc3479b705180 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml @@ -5,19 +5,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: index: index: test - id: 42 + id: "42" body: { id: 42, foo: bar, age: 18 } - do: index: index: test - id: 172 + id: "172" body: { id: 172, foo: bar, age: 24 } - do: @@ -27,7 +27,7 @@ setup: - do: index: index: test2 - id: 45 + id: "45" body: { id: 45, foo: bar, age: 19 } 
- do: @@ -67,7 +67,7 @@ setup: - do: index: index: test - id: 100 + id: "100" body: { id: 100, foo: bar, age: 23 } - do: indices.refresh: @@ -148,7 +148,7 @@ setup: search: body: slice: - id: 0 + id: "0" max: 2 size: 1 query: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml index de1baae05097d..2b38a72c70a06 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml @@ -15,7 +15,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: title: "Time, Clocks and the Ordering of Events in a Distributed System" abstract: "The concept of one event happening before another..." diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 0c4b1089b8122..f16b1dd71f188 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -14,7 +14,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: [ "a", "b" ] @@ -103,7 +103,7 @@ fetch nested source: - do: index: index: test_nested - id: 1 + id: "1" refresh: true body: keyword: [ "a", "b" ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml index b817c5dfdb2cb..cef4bbc187c84 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml @@ -52,7 +52,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: byte : 1 double : 1.0 @@ -70,7 +70,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: byte : 2 double : 2.0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml index 42951e868c6b8..9e6b4582d8f22 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml @@ -19,13 +19,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_2 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml index 131c8f92a231e..a264f6c699a41 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml @@ -12,7 +12,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml index e38f5f862a273..825bd9ce6de8e 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml @@ -6,13 +6,13 @@ - do: index: index: test - id: 1 + id: "1" body: { foo: bar } - do: index: index: test - id: 2 + id: "2" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml index 3d326d75fbf70..e1f367e8c217e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml @@ -5,19 +5,19 @@ - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: index: index: test - id: 42 + id: "42" body: { id: 42, foo: bar, age: 18 } - do: index: index: test - id: 172 + id: "172" body: { id: 172, foo: bar, age: 24 } - do: @@ -231,7 +231,7 @@ - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml index 4d8b1484c74ac..cc032a400060f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml @@ -7,7 +7,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: user : foo amount : 35 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml index ff6ecfb0c34cf..cf88ced4dd740 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml @@ -32,14 +32,14 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: test - id: 2 + id: "2" body: suggest_1: "baz" @@ -65,7 +65,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_2: ["bar", "foo"] @@ -106,7 +106,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_3: input: "bar" @@ -115,7 +115,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_3: input: "baz" @@ -145,7 +145,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_4: - input: "bar" @@ -156,7 +156,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_4: - input: "baz" @@ -203,7 +203,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_5a: "bar" suggest_5b: "baz" @@ -245,7 +245,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_6: input: "bar" @@ -256,7 +256,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_6: input: "baz" @@ -294,14 +294,14 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: test - id: 2 + id: "2" body: suggest_1: "bar" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml index 6f3bae7f8e46f..f88726469f51c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml @@ -50,7 +50,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo red" @@ -60,7 +60,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo blue" @@ -92,7 +92,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context_with_path: input: "Foo red" @@ -102,7 +102,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context_with_path: "Foo blue" color: "blue" @@ -163,7 +163,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_geo: input: "Marriot in Amsterdam" @@ -175,7 +175,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_geo: input: "Marriot in Berlin" @@ -214,7 +214,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_multi_contexts: "Marriot in Amsterdam" location: @@ -225,7 +225,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_multi_contexts: "Marriot in Berlin" location: @@ -281,7 +281,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -291,7 +291,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -301,7 +301,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo" @@ -334,7 +334,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -349,7 +349,7 @@ setup: catch: /Contexts are mandatory in context enabled completion field \[suggest_context\]/ index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml index 8e793e4beb6f0..8bbda56db7e53 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -17,14 +17,14 @@ - do: index: index: completion_with_sub_keyword - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: completion_with_sub_keyword - id: 2 + id: "2" body: suggest_1: "baz" @@ -75,14 +75,14 @@ - do: index: index: completion_with_sub_completion - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: completion_with_sub_completion - id: 2 + id: "2" body: suggest_1: "baz" @@ -130,7 +130,7 @@ - do: index: index: completion_with_context - id: 1 + id: "1" body: suggest_1: input: "foo red" @@ -140,7 +140,7 @@ - do: index: index: completion_with_context - id: 2 + id: "2" body: suggest_1: input: "foo blue" @@ -186,7 +186,7 @@ - do: index: index: completion_with_weight - id: 1 + id: "1" body: suggest_1: input: "bar" @@ -195,7 +195,7 @@ - do: index: index: completion_with_weight - id: 2 + id: "2" body: suggest_1: input: "baz" @@ -238,7 +238,7 @@ - do: index: index: geofield_with_completion - id: 1 + id: "1" body: geofield: "hgjhrwysvqw7" #41.12,-72.34,12 @@ -246,7 +246,7 @@ - do: index: index: geofield_with_completion - id: 1 + id: "1" body: geofield: "hgm4psywmkn7" #41.12,-71.34,12 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml index e3ba834de074f..6b03428332932 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml @@ -22,14 +22,14 @@ - do: index: index: testidx - id: 1 + id: "1" body: text : "foo bar" - do: termvectors: index: testidx - id: 1 + id: "1" realtime: false - match: { _index: "testidx" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml index 8bfbee483690c..cc2272f813f32 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml @@ -16,13 +16,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: termvectors: index: test_1 - id: 1 + id: "1" realtime: false - is_false: found @@ -30,7 +30,7 @@ - do: termvectors: index: test_1 - id: 1 + id: "1" realtime: true - is_true: found diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index 0e22f086096ff..4b6a376637617 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -52,7 +52,7 @@ add time series mappings: can't shadow dimensions: - skip: version: " - 8.0.99" - reason: introduced in 8.1.0 to be backported to 8.0.0 + reason: introduced in 8.1.0 - do: indices.create: @@ -118,7 +118,7 @@ can't shadow dimensions: can't shadow metrics: - skip: version: " - 8.0.99" - reason: introduced in 8.1.0 to be backported to 8.0.0 + reason: introduced in 8.1.0 - do: indices.create: @@ -214,3 +214,25 @@ no _tsid in standard indices: - is_false: fields.metricset.keyword.non_searchable_indices - is_false: fields.metricset.keyword.non_aggregatable_indices - is_false: fields._tsid # _tsid metadata field must not exist in non-time-series indices + +--- +no nested dimensions: + - skip: + version: " - 8.1.99" + reason: introduced in 8.2.0 + + - do: + catch: /time_series_dimension can't be configured in nested field \[nested.dim\]/ + indices.create: + index: test + body: + mappings: + properties: + "@timestamp": + type: date + nested: + type: nested + properties: + dim: + type: keyword + time_series_dimension: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index c01fa5f24de44..59f480e42c98a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -54,10 +54,10 @@ ecs style: time_series_metric: gauge --- -top level dim object: +top level wildcard dim object: - skip: - version: " - 8.0.99" - reason: introduced in 8.1.0 + version: " - 8.1.99" + reason: routing_path object type check improved in 8.2.0 - do: indices.create: @@ -66,7 +66,7 @@ top level dim object: settings: index: mode: time_series - routing_path: [dim.*] + routing_path: [dim*] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z @@ -111,6 +111,39 @@ top level dim object: type: double
time_series_metric: gauge +--- +exact match object type: + - skip: + version: " - 8.1.99" + reason: routing_path object type check improved in 8.2.0 + + - do: + catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] and without the \[script\] parameter. \[dim\] was \[object\]./' + indices.create: + index: tsdb_index + body: + settings: + index: + mode: time_series + routing_path: [dim] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + number_of_replicas: 0 + number_of_shards: 2 + mappings: + properties: + "@timestamp": + type: date + dim: + properties: + metricset: + type: keyword + time_series_dimension: true + uid: + type: keyword + time_series_dimension: true + --- non keyword matches routing_path: - skip: @@ -271,3 +304,63 @@ runtime field matching routing path: - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"bar": "a"}}}' - match: {items.0.index.error.reason: "All fields matching [routing_path] must be mapped but [dim.foo] was declared as [dynamic: false]"} - match: {items.1.index.error.reason: "All fields matching [routing_path] must be mapped but [dim.foo] was declared as [dynamic: false]"} + +--- +nested dimensions: + - skip: + version: " - 8.1.99" + reason: message changed in 8.2.0 + + - do: + catch: /time_series_dimension can't be configured in nested field \[nested.dim\]/ + indices.create: + index: test + body: + settings: + index: + mode: time_series + routing_path: [nested.*] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + nested: + type: nested + properties: + dim: + type: keyword + time_series_dimension: true + +--- +nested fields: + - skip: + version: " - 8.1.99" + reason: message changed in 8.2.0 + + - do: + catch: /cannot have nested fields when index is in \[index.mode=time_series\]/ + indices.create: + index: test + body: + settings: + index: + mode: time_series + routing_path: [dim] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + dim: + type: keyword + time_series_dimension: true + nested: + type: nested + properties: + foo: + type: keyword diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index a742d0a8148a8..af48b28c75c4a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -111,7 +111,7 @@ delete: catch: /delete is not supported because the destination index \[test\] is in time series mode/ delete: index: test - id: 1 + id: "1" --- delete over _bulk: @@ -144,7 +144,7 @@ noop update: catch: /update is not supported because the destination index \[test\] is in time series mode/ update: index: test - id: 1 + id: "1" body: doc: {} @@ -160,7 +160,7 @@ update: catch: /update is not supported because the destination index \[test\] is in time series mode/ update: index: test - id: 1 + id: "1" body: doc: "@timestamp": "2021-04-28T18:35:24.467Z" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml index dda545d56e350..2a8734210432e 100644 ---
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: bar count: 1 @@ -15,7 +15,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: foo: baz @@ -29,7 +29,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml index 5a0dc0485b103..85ba488ac9e98 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml @@ -18,13 +18,13 @@ - do: index: index: foobar - id: 1 + id: "1" body: { foo: bar } - do: update: index: foobar - id: 1 + id: "1" body: doc: foo: baz diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml index 6c69bc2aa993b..fe5371dfeea96 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -16,7 +16,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -27,7 +27,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -39,7 +39,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } doc_as_upsert: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml index a97c68ba6ee3f..e4ebf83608b60 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml @@ -4,7 +4,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: bar count: 1 @@ -13,7 +13,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: foo: baz @@ -27,7 +27,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml index 12f118ac28d01..ae21e68b160f5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _seq_no: 0 } @@ -17,7 +17,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } @@ -29,7 +29,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } detect_noop: false diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml index 39e2273d5cafb..5f649ed497475 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } upsert: { foo: baz } @@ -13,7 +13,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - is_false: _source.count @@ -22,7 +22,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } upsert: { foo: baz } @@ -30,7 +30,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index 0d695cb754056..9d94f7cf8e93e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } doc_as_upsert: true @@ -13,7 +13,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 1 } @@ -22,7 +22,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { count: 2 } doc_as_upsert: true @@ -30,7 +30,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 2 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml index 318ecd3a7c0ce..12a0c88e5a103 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml @@ -4,7 +4,7 @@ catch: missing update: index: test_1 - id: 1 + id: "1" if_seq_no: 1 if_primary_term: 1 body: @@ -13,7 +13,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: baz @@ -21,7 +21,7 @@ catch: conflict update: index: test_1 - id: 1 + id: "1" if_seq_no: 234 if_primary_term: 1 body: @@ -30,7 +30,7 @@ - do: update: index: test_1 - id: 1 + id: "1" if_seq_no: 0 if_primary_term: 1 body: @@ -39,7 +39,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source: { foo: bar } } @@ -48,7 +48,7 @@ body: - update: _index: test_1 - _id: 1 + _id: "1" if_seq_no: 100 if_primary_term: 200 - doc: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml index 8b67ca512f326..403debb08a64a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml @@ -20,7 +20,7 @@ - do: update: index: test_1 - id: 1 + id: "1" routing: "5" body: doc: { foo: baz } @@ -29,7 +29,7 @@ - do: get: index: test_1 - id: 1 + id: "1" routing: "5" stored_fields: _routing @@ -39,14 +39,14 @@ catch: missing update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } - do: update: index: test_1 - id: 1 + id: "1" routing: "5" _source: foo body: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml index 77888fcbb2710..873cdfde05b76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml @@ -14,7 +14,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } upsert: { foo: bar } @@ -24,14 +24,14 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 0 } - do: update: index: test_1 - id: 2 + id: "2" refresh: true body: doc: { foo: baz } @@ -43,7 +43,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 2 }} + query: { term: { _id: "2" }} - match: { hits.total: 1 } @@ -55,7 +55,7 @@ - do: index: index: test_1 - id: 1 + id: "1" refresh: true body: { foo: bar } - is_true: forced_refresh @@ -63,7 +63,7 @@ - do: update: index: test_1 - id: 1 + id: "1" refresh: "" body: doc: {cat: dog} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml index c8eeba967a6f7..866bf3c6c195a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" _source: [foo, bar] body: doc: { foo: baz } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml index fe76ab5299cda..d9a0f65f36170 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -12,7 +12,7 @@ - do: update: index: test_1 - id: 1 + id: "1" parent: 5 fields: [ _routing ] body: @@ -24,7 +24,7 @@ - do: get: index: test_1 - id: 1 + id: "1" parent: 5 stored_fields: [ _routing ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml index 9a48d24783b44..4dd48f3462a4f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml @@ -7,7 +7,7 @@ catch: /\[UpdateRequest\] unknown field \[dac\] did you mean \[doc\]\?/ update: index: test - id: 1 + id: "1" body: dac: { foo: baz } upsert: { foo: bar } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml index f6c95ebd2463e..b8cd9c6eb0a38 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml @@ -7,7 +7,7 @@ catch: missing update: index: test_require_alias - id: 1 + id: "1" require_alias: true body: doc: { foo: bar, count: 1 } @@ -28,7 +28,7 @@ - do: update: index: test_require_alias - id: 1 + id: "1" require_alias: true body: doc: { foo: bar, count: 1 } diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml index 536ad86378e69..323a5b9abbf1e 100644 --- 
a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml @@ -18,7 +18,7 @@ setup: - do: index: index: "my-index" - id: 1 + id: "1" refresh: true body: offer: @@ -51,7 +51,7 @@ setup: - do: index: index: "my-locations" - id: 1 + id: "1" refresh: true body: offer: diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml index 2d645a9419171..591f60578b819 100644 --- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml @@ -24,7 +24,7 @@ setup: - do: index: index: "test" - id: 1 + id: "1" body: my_field1: "brown fox jump" my_field2: "xylophone" diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml index fa4e20fdfa6fe..fdaebbb2b81e7 100644 --- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml @@ -9,7 +9,7 @@ type query throws exception when used: - do: index: index: "test1" - id: 1 + id: "1" type: "cat" refresh: true body: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 6603a6ee5aee6..ed71f7845bb61 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -570,9 +570,7 @@ public IndexMetadata randomChange(IndexMetadata part) { } break; case 2: - builder.settings( - Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - ); + builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)); break; default: throw new IllegalArgumentException("Shouldn't be here"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java new file mode 100644 index 0000000000000..c99b090e958f2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.GetHealthAction; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.disruption.NetworkDisruption; +import org.elasticsearch.test.transport.MockTransportService; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService.NAME; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +public class InstanceHasMasterHealthIndicatorServiceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(MockTransportService.TestPlugin.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "all") + .build(); + } + + public void testGetHealthWhenMasterIsElected() throws Exception { + var client = client(); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.findComponent(CLUSTER_COORDINATION).findIndicator(NAME).status(), equalTo(GREEN)); + } + + public void testGetHealthWhenNoMaster() throws Exception { + var client = internalCluster().coordOnlyNodeClient(); + + var disruptionScheme = new NetworkDisruption( + new NetworkDisruption.IsolateAllNodes(Set.of(internalCluster().getNodeNames())), + NetworkDisruption.DISCONNECT + ); + + internalCluster().setDisruptionScheme(disruptionScheme); + disruptionScheme.startDisrupting(); + + try { + assertBusy(() -> { + ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); + assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.findComponent(CLUSTER_COORDINATION).findIndicator(NAME).status(), equalTo(RED)); + }); + } finally { + internalCluster().clearDisruptionScheme(true); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java index a4426ef074350..2eadb96084c5a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java @@ -8,93 +8,143 @@ package org.elasticsearch.health; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.coordination.NoMasterBlockService; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; -import org.elasticsearch.health.components.controller.ClusterCoordination; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.disruption.NetworkDisruption; -import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; +import java.util.List; +import java.util.function.Supplier; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class GetHealthActionIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singletonList(MockTransportService.TestPlugin.class); + return appendToCopy(super.nodePlugins(), TestHealthPlugin.class); } - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "all") - .build(); - } + public static final Setting<HealthStatus> TEST_HEALTH_STATUS = new Setting<>( + "test.health.status", + "GREEN", + HealthStatus::valueOf, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); - public void testGetHealth() throws Exception { - GetHealthAction.Response response = client().execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); - assertEquals(cluster().getClusterName(), response.getClusterName().value()); - assertEquals(HealthStatus.GREEN, response.getStatus()); + public static final class TestHealthPlugin extends Plugin implements HealthPlugin { + + private final SetOnce<FixedStatusHealthIndicatorService> healthIndicatorService = new SetOnce<>(); - assertEquals(2, response.getComponents().size()); + @Override + public List<Setting<?>> getSettings() { + return List.of(TEST_HEALTH_STATUS); + } - for (HealthComponentResult component : response.getComponents()) { - assertEquals(HealthStatus.GREEN, component.status()); + @Override + public Collection<Object> createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier<RepositoriesService> repositoriesServiceSupplier + ) { + var service = new FixedStatusHealthIndicatorService(clusterService); + healthIndicatorService.set(service); + return List.of(service); } - HealthComponentResult controller = response.getComponents() - .stream() - .filter(c -> c.name().equals("cluster_coordination")) - .findAny() - .orElseThrow(); - assertEquals(1, controller.indicators().size()); - HealthIndicatorResult nodeDoesNotHaveMaster = controller.indicators().get(ClusterCoordination.INSTANCE_HAS_MASTER_NAME); -
assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_NAME, nodeDoesNotHaveMaster.name()); - assertEquals(HealthStatus.GREEN, nodeDoesNotHaveMaster.status()); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_GREEN_SUMMARY, nodeDoesNotHaveMaster.summary()); + @Override + public Collection getHealthIndicatorServices() { + return List.of(healthIndicatorService.get()); + } } - public void testGetHealthInstanceNoMaster() throws Exception { - // builds the coordinating-only client before disrupting all nodes - final Client client = internalCluster().coordOnlyNodeClient(); + /** + * This indicator could be used to pre-define health of the cluster with {@code TEST_HEALTH_STATUS} property + * and return it via health API. + */ + public static final class FixedStatusHealthIndicatorService implements HealthIndicatorService { + + private final ClusterService clusterService; + + public FixedStatusHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return "test_indicator"; + } + + @Override + public String component() { + return "test_component"; + } + + @Override + public HealthIndicatorResult calculate() { + var status = clusterService.getClusterSettings().get(TEST_HEALTH_STATUS); + return createIndicator(status, "Health is set to [" + status + "] by test plugin", HealthIndicatorDetails.EMPTY); + } + } - final NetworkDisruption disruptionScheme = new NetworkDisruption( - new NetworkDisruption.IsolateAllNodes(new HashSet<>(Arrays.asList(internalCluster().getNodeNames()))), - NetworkDisruption.DISCONNECT - ); + public void testGetHealth() throws Exception { - internalCluster().setDisruptionScheme(disruptionScheme); - disruptionScheme.startDisrupting(); + var client = client(); + var status = randomFrom(HealthStatus.values()); try { - assertBusy(() -> { - ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)); - - GetHealthAction.Response response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); - assertEquals(HealthStatus.RED, response.getStatus()); - assertEquals(2, response.getComponents().size()); - HealthComponentResult controller = response.getComponents() - .stream() - .filter(c -> c.name().equals("cluster_coordination")) - .findAny() - .orElseThrow(); - assertEquals(1, controller.indicators().size()); - HealthIndicatorResult instanceHasMaster = controller.indicators().get(ClusterCoordination.INSTANCE_HAS_MASTER_NAME); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_NAME, instanceHasMaster.name()); - assertEquals(HealthStatus.RED, instanceHasMaster.status()); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_RED_SUMMARY, instanceHasMaster.summary()); - }); + updateClusterSettings(Settings.builder().put(TEST_HEALTH_STATUS.getKey(), status)); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.getStatus(), equalTo(status)); + assertThat(response.getClusterName(), equalTo(new ClusterName(cluster().getClusterName()))); + assertThat( + response.findComponent("test_component"), + equalTo( + new HealthComponentResult( + "test_component", + status, + List.of( + new HealthIndicatorResult( + "test_indicator", + "test_component", + status, + "Health is set to [" + status + "] by test plugin", + HealthIndicatorDetails.EMPTY + ) + ) + ) + ) + ); } finally { - 
internalCluster().clearDisruptionScheme(true); + updateClusterSettings(Settings.builder().putNull(TEST_HEALTH_STATUS.getKey())); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java index a9b3058770142..3f5b42826e1ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java @@ -52,7 +52,7 @@ protected Collection> nodePlugins() { } /** - * Check that if the the SystemIndexManager finds a managed index with out-of-date mappings, then + * Check that if the SystemIndexManager finds a managed index with out-of-date mappings, then * the manager updates those mappings. */ public void testSystemIndexManagerUpgradesMappings() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 9a800c2656c45..465c394403bef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -28,6 +29,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; @@ -36,13 +39,16 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.timeseries.TimeSeriesAggregationBuilder; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; +import org.junit.BeforeClass; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -55,9 +61,12 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_END_TIME; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_START_TIME; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.search.SearchCancellationIT.ScriptedBlockPlugin.SEARCH_BLOCK_SCRIPT_NAME; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; @@ -69,6 +78,13 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class SearchCancellationIT extends ESIntegTestCase { + private static boolean lowLevelCancellation; + + @BeforeClass + public static void init() { + lowLevelCancellation = randomBoolean(); + } + @Override protected Collection> nodePlugins() { return Collections.singleton(ScriptedBlockPlugin.class); @@ -76,7 +92,6 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - boolean lowLevelCancellation = randomBoolean(); logger.info("Using lowLevelCancellation: {}", lowLevelCancellation); return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) @@ -227,7 +242,12 @@ public void testCancellationDuringAggregation() throws Exception { new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) ) .reduceScript( - new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_SCRIPT_NAME, Collections.emptyMap()) + new Script( + ScriptType.INLINE, + "mockscript", + ScriptedBlockPlugin.REDUCE_BLOCK_SCRIPT_NAME, + Collections.emptyMap() + ) ) ) ) @@ -238,6 +258,80 @@ public void testCancellationDuringAggregation() throws Exception { ensureSearchWasCancelled(searchResponse); } + public void testCancellationDuringTimeSeriesAggregation() throws Exception { + List plugins = initBlockFactory(); + int numberOfShards = between(2, 5); + long now = Instant.now().toEpochMilli(); + int numberOfRefreshes = between(1, 5); + int numberOfDocsPerRefresh = numberOfShards * between(1500, 2000) / numberOfRefreshes; + assertAcked( + prepareCreate("test").setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim") + .put(TIME_SERIES_START_TIME.getKey(), now) + .put(TIME_SERIES_END_TIME.getKey(), now + (long) numberOfRefreshes * numberOfDocsPerRefresh + 1) + .build() + ).setMapping(""" + { + "properties": { + "@timestamp": {"type": "date", "format": "epoch_millis"}, + "dim": {"type": "keyword", "time_series_dimension": true} + } + } + """) + ); + + for (int i = 0; i < numberOfRefreshes; i++) { + // Make sure we sometimes have a few segments + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int j = 0; j < numberOfDocsPerRefresh; j++) { + bulkRequestBuilder.add( + client().prepareIndex("test") + .setOpType(DocWriteRequest.OpType.CREATE) + .setSource("@timestamp", now + (long) i * numberOfDocsPerRefresh + j, "val", (double) j, "dim", String.valueOf(i)) + ); + } + assertNoFailures(bulkRequestBuilder.get()); + } + + logger.info("Executing search"); + TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg"); + ActionFuture searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .addAggregation( + timeSeriesAggregationBuilder.subAggregation( + new ScriptedMetricAggregationBuilder("sub_agg").initScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.INIT_SCRIPT_NAME, Collections.emptyMap()) + ) + .mapScript( + new 
Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.MAP_BLOCK_SCRIPT_NAME, Collections.emptyMap()) + ) + .combineScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) + ) + .reduceScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_FAIL_SCRIPT_NAME, Collections.emptyMap()) + ) + ) + ) + .execute(); + awaitForBlock(plugins); + cancelSearch(SearchAction.NAME); + disableBlocks(plugins); + + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, searchResponse::actionGet); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + logger.info("All shards failed with", ex); + if (lowLevelCancellation) { + // Ensure that we cancelled in TimeSeriesIndexSearcher and not in reduce phase + assertThat(ExceptionsHelper.stackTrace(ex), containsString("TimeSeriesIndexSearcher")); + } + + } + public void testCancellationOfScrollSearches() throws Exception { List plugins = initBlockFactory(); @@ -414,8 +508,11 @@ public static class ScriptedBlockPlugin extends MockScriptPlugin { static final String SEARCH_BLOCK_SCRIPT_NAME = "search_block"; static final String INIT_SCRIPT_NAME = "init"; static final String MAP_SCRIPT_NAME = "map"; + static final String MAP_BLOCK_SCRIPT_NAME = "map_block"; static final String COMBINE_SCRIPT_NAME = "combine"; static final String REDUCE_SCRIPT_NAME = "reduce"; + static final String REDUCE_FAIL_SCRIPT_NAME = "reduce_fail"; + static final String REDUCE_BLOCK_SCRIPT_NAME = "reduce_block"; static final String TERM_SCRIPT_NAME = "term"; private final AtomicInteger hits = new AtomicInteger(); @@ -449,10 +546,16 @@ public Map, Object>> pluginScripts() { this::nullScript, MAP_SCRIPT_NAME, this::nullScript, + MAP_BLOCK_SCRIPT_NAME, + this::mapBlockScript, COMBINE_SCRIPT_NAME, this::nullScript, - REDUCE_SCRIPT_NAME, + REDUCE_BLOCK_SCRIPT_NAME, this::blockScript, + REDUCE_SCRIPT_NAME, + this::termScript, + REDUCE_FAIL_SCRIPT_NAME, + this::reduceFailScript, TERM_SCRIPT_NAME, this::termScript ); @@ -474,6 +577,11 @@ private Object searchBlockScript(Map params) { return true; } + private Object reduceFailScript(Map params) { + fail("Shouldn't reach reduce"); + return true; + } + private Object nullScript(Map params) { return null; } @@ -483,7 +591,9 @@ private Object blockScript(Map params) { if (runnable != null) { runnable.run(); } - LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + } hits.incrementAndGet(); try { assertBusy(() -> assertFalse(shouldBlock.get())); @@ -493,6 +603,23 @@ private Object blockScript(Map params) { return 42; } + private Object mapBlockScript(Map params) { + final Runnable runnable = beforeExecution.get(); + if (runnable != null) { + runnable.run(); + } + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in map"); + } + hits.incrementAndGet(); + try { + assertBusy(() -> assertFalse(shouldBlock.get())); + } catch (Exception e) { + throw new RuntimeException(e); + } + return 1; + } + private Object termScript(Map params) { return 1; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java new file mode 100644 index 0000000000000..ae2fc82c75273 --- 
/dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.GetHealthAction; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; +import static org.elasticsearch.test.hamcrest.ThrowableAssertions.assertThatThrows; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RepositoryIntegrityHealthIndicatorServiceTestIT extends AbstractSnapshotIntegTestCase { + + public void testRepositoryIntegrityHealthIndicator() throws IOException, InterruptedException { + + var client = client(); + + var repository = "test-repo"; + var location = randomRepoPath(); + + createRepository( + repository, + "fs", + Settings.builder() + .put("location", location) + .put("compress", false) + // Don't cache repository data because the test manually modifies the repository data + .put(BlobStoreRepository.CACHE_REPOSITORY_DATA.getKey(), false) + ); + + assertSnapshotRepositoryHealth("Indicator should be green after empty repository is created", client, GREEN); + + createIndex("test-index-1"); + indexRandomDocs("test-index-1", randomIntBetween(1, 10)); + createFullSnapshot(repository, "snapshot-1"); + + assertSnapshotRepositoryHealth("Indicator should be green after successful snapshot is taken", client, GREEN); + + corruptRepository(repository, location); + // Currently, the health indicator is not proactively checking the repository and + // instead relies on other operations to detect and flag repository corruption + assertThatThrows( + () -> createFullSnapshot(repository, "snapshot-2"), + RepositoryException.class, + containsString("[" + repository + "] Could not read repository data") + ); + + assertSnapshotRepositoryHealth("Indicator should be red after file is deleted from the repository", client, RED); + + deleteRepository(repository); + } + + private void assertSnapshotRepositoryHealth(String message, Client client, HealthStatus status) { + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).actionGet(); + assertThat(message, response.findComponent(SNAPSHOT).findIndicator(NAME).status(), equalTo(status)); + } + + private void corruptRepository(String name, Path location) throws IOException { + final RepositoryData repositoryData = getRepositoryData(name); + Files.delete(location.resolve("index-" + repositoryData.getGenId())); + } +} diff --git 
a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 078c10edba060..b8032448cb0e0 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -100,6 +100,7 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_7_17_0 = new Version(7_17_00_99, LUCENE_8_11_1); public static final Version V_7_17_1 = new Version(7_17_01_99, LUCENE_8_11_1); public static final Version V_8_0_0 = new Version(8_00_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); + public static final Version V_8_0_1 = new Version(8_00_01_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version V_8_1_0 = new Version(8_01_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version V_8_2_0 = new Version(8_02_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version CURRENT = V_8_2_0; diff --git a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java index 8f3e7ee60b242..2a9887d8a30d7 100644 --- a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java +++ b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java @@ -8,7 +8,9 @@ package org.elasticsearch.action; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; import java.util.ArrayList; import java.util.List; @@ -22,8 +24,14 @@ */ public final class ResultDeduplicator<T, R> { + private final ThreadContext threadContext; private final ConcurrentMap<T, CompositeListener> requests = ConcurrentCollections.newConcurrentMap(); + public ResultDeduplicator(ThreadContext threadContext) { + assert threadContext != null; + this.threadContext = threadContext; + } + /** * Ensures a given request is not executed multiple times when another equal request is already in-flight.
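 * A minimal usage sketch of this class, kept in javadoc form to match the surrounding comment (an illustrative aside, not patch content; the {@code ShardEntry} request type and the {@code sendShardAction} handler are assumed placeholder names):
 * <pre>
 * ResultDeduplicator<ShardEntry, Void> deduplicator = new ResultDeduplicator<>(threadPool.getThreadContext());
 * // equal concurrent requests share a single in-flight execution; each caller's listener is completed exactly once
 * deduplicator.executeOnce(entry, listener, (request, delegate) -> sendShardAction(request, delegate));
 * </pre>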
* If the request is not yet known to the deduplicator it will invoke the passed callback with an {@link ActionListener} @@ -35,7 +43,8 @@ public final class ResultDeduplicator { * @param callback Callback to be invoked with request and completion listener the first time the request is added to the deduplicator */ public void executeOnce(T request, ActionListener listener, BiConsumer> callback) { - ActionListener completionListener = requests.computeIfAbsent(request, CompositeListener::new).addListener(listener); + ActionListener completionListener = requests.computeIfAbsent(request, CompositeListener::new) + .addListener(ContextPreservingActionListener.wrapPreservingContext(listener, threadContext)); if (completionListener != null) { callback.accept(request, completionListener); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java index 1f09386f535ac..d3be7bad6eb82 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java @@ -24,7 +24,7 @@ public ClusterTasksResult execute(ClusterState currentSt for (ClusterStateUpdateTask task : tasks) { try { clusterState = task.execute(clusterState); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 5e2034b6905c9..24cde795cf1ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -112,7 +112,7 @@ private void waitForEventsAndExecuteHealth( if (request.local()) { new LocalMasterServiceTask(request.waitForEvents()) { @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + protected void onPublicationComplete() { final long timeoutInMillis = Math.max(0, endTimeRelativeMillis - threadPool.relativeTimeInMillis()); final TimeValue newTimeout = TimeValue.timeValueMillis(timeoutInMillis); request.timeout(newTimeout); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java index 0b3c93c3061c7..6016369637f6e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java @@ -28,7 +28,9 @@ import org.elasticsearch.upgrades.SystemIndexMigrationTaskParams; import java.util.Comparator; +import java.util.EnumSet; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.getFeatureUpgradeStatus; @@ -75,11 +77,15 @@ protected void masterOperation( ClusterState state, ActionListener 
listener ) throws Exception { + final Set upgradableStatuses = EnumSet.of( + GetFeatureUpgradeStatusResponse.UpgradeStatus.MIGRATION_NEEDED, + GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR + ); List featuresToMigrate = systemIndices.getFeatures() .values() .stream() .map(feature -> getFeatureUpgradeStatus(state, feature)) - .filter(status -> status.getUpgradeStatus().equals(GetFeatureUpgradeStatusResponse.UpgradeStatus.MIGRATION_NEEDED)) + .filter(status -> upgradableStatuses.contains(status.getUpgradeStatus())) .map(GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus::getFeatureName) .map(PostFeatureUpgradeResponse.Feature::new) .sorted(Comparator.comparing(PostFeatureUpgradeResponse.Feature::getFeatureName)) // consistent ordering to simplify testing diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java index 2f3e92d2f55a9..c2931714e72a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java @@ -16,10 +16,13 @@ import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreService; +import java.util.function.Supplier; + import static org.elasticsearch.snapshots.RestoreService.restoreInProgress; public class RestoreClusterStateListener implements ClusterStateListener { @@ -29,43 +32,48 @@ public class RestoreClusterStateListener implements ClusterStateListener { private final ClusterService clusterService; private final String uuid; private final ActionListener listener; + private final Supplier contextSupplier; private RestoreClusterStateListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + Supplier contextSupplier ) { this.clusterService = clusterService; this.uuid = response.getUuid(); this.listener = listener; + this.contextSupplier = contextSupplier; } @Override public void clusterChanged(ClusterChangedEvent changedEvent) { - final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); - final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); - if (prevEntry == null) { - // When there is a master failure after a restore has been started, this listener might not be registered - // on the current master and as such it might miss some intermediary cluster states due to batching. - // Clean up listener in that case and acknowledge completion of restore operation to client. 
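// A sketch of the context-restoration idiom this patch introduces around the listener body below (illustrative
// only; the empty handler is a placeholder). ThreadContext#newRestorableContext returns a supplier whose get()
// reinstates the captured request and response headers inside a try-with-resources block:
//
//     Supplier<ThreadContext.StoredContext> restorer = threadContext.newRestorableContext(true);
//     clusterService.addListener(event -> {
//         try (ThreadContext.StoredContext ignored = restorer.get()) {
//             // runs with the original caller's thread context restored
//         }
//     });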
- clusterService.removeListener(this); - listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); - } else if (newEntry == null) { - clusterService.removeListener(this); - ImmutableOpenMap shards = prevEntry.shards(); - assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); - assert RestoreService.completed(shards) : "expected all restore entries to be completed"; - RestoreInfo ri = new RestoreInfo( - prevEntry.snapshot().getSnapshotId().getName(), - prevEntry.indices(), - shards.size(), - shards.size() - RestoreService.failedShards(shards) - ); - RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); - logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); - listener.onResponse(response); - } else { - // restore not completed yet, wait for next cluster state update + try (ThreadContext.StoredContext stored = contextSupplier.get()) { + final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); + final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); + if (prevEntry == null) { + // When there is a master failure after a restore has been started, this listener might not be registered + // on the current master and as such it might miss some intermediary cluster states due to batching. + // Clean up listener in that case and acknowledge completion of restore operation to client. + clusterService.removeListener(this); + listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); + } else if (newEntry == null) { + clusterService.removeListener(this); + ImmutableOpenMap shards = prevEntry.shards(); + assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); + assert RestoreService.completed(shards) : "expected all restore entries to be completed"; + RestoreInfo ri = new RestoreInfo( + prevEntry.snapshot().getSnapshotId().getName(), + prevEntry.indices(), + shards.size(), + shards.size() - RestoreService.failedShards(shards) + ); + RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); + logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); + listener.onResponse(response); + } else { + // restore not completed yet, wait for next cluster state update + } } } @@ -76,8 +84,11 @@ public void clusterChanged(ClusterChangedEvent changedEvent) { public static void createAndRegisterListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + ThreadContext threadContext ) { - clusterService.addListener(new RestoreClusterStateListener(clusterService, response, listener)); + clusterService.addListener( + new RestoreClusterStateListener(clusterService, response, listener, threadContext.newRestorableContext(true)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 7b247f1b14a42..73b66fa5d1bb5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -72,7 +72,12 @@ protected void masterOperation( ) { restoreService.restoreSnapshot(request, 
listener.delegateFailure((delegatedListener, restoreCompletionResponse) -> { if (restoreCompletionResponse.getRestoreInfo() == null && request.waitForCompletion()) { - RestoreClusterStateListener.createAndRegisterListener(clusterService, restoreCompletionResponse, delegatedListener); + RestoreClusterStateListener.createAndRegisterListener( + clusterService, + restoreCompletionResponse, + delegatedListener, + threadPool.getThreadContext() + ); } else { delegatedListener.onResponse(new RestoreSnapshotResponse(restoreCompletionResponse.getRestoreInfo())); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index adabbd19f6fc1..5cf491d42a6de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -120,7 +120,7 @@ public TransportAction( // each duplicate task task.indexNameRef.set(successfulBefore.indexNameRef.get()); } - builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 4ed98350e3070..95ab9d9e12f41 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -92,7 +92,7 @@ protected void masterOperation( .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); - indexStateService.openIndex(updateRequest, new ActionListener<>() { + indexStateService.openIndices(updateRequest, new ActionListener<>() { @Override public void onResponse(ShardsAcknowledgedResponse response) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index a0b0f7341ce10..cdea744b6fec1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -278,7 +278,9 @@ private RolloverResult rolloverDataStream( currentState, createIndexClusterStateRequest, silent, - (builder, indexMetadata) -> builder.put(ds.rollover(indexMetadata.getIndex(), newGeneration)) + (builder, indexMetadata) -> builder.put( + ds.rollover(indexMetadata.getIndex(), newGeneration, templateV2.getDataStreamTemplate().getIndexMode()) + ) ); RolloverInfo rolloverInfo = new RolloverInfo(dataStreamName, metConditions, threadPool.absoluteTimeInMillis()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 8a57f56f6777d..f46ae3bd62d45 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -380,7 +380,7 @@ public ClusterTasksResult 
execute(ClusterState currentState, List< for (RolloverTask task : tasks) { try { state = task.performRollover(state); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 907c1c4b1c9a3..64b1d9aa01ecb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -127,7 +127,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry componentTemplate : this.componentTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), componentTemplate.getKey()); - builder.field(COMPONENT_TEMPLATE.getPreferredName(), componentTemplate.getValue()); + builder.field(COMPONENT_TEMPLATE.getPreferredName(), componentTemplate.getValue(), params); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index 2376c77084311..e6574541de6a5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -137,7 +137,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry indexTemplate : this.indexTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), indexTemplate.getKey()); - builder.field(INDEX_TEMPLATE.getPreferredName(), indexTemplate.getValue()); + builder.field(INDEX_TEMPLATE.getPreferredName(), indexTemplate.getValue(), params); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 3ee3f88aad75e..98ecb98791194 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -476,22 +477,19 @@ static class Builder { private final String name; private final String type; private boolean isMetadataField; - private boolean isSearchable; - private boolean isAggregatable; - private boolean isDimension; + private int searchableIndices = 0; + private int aggregatableIndices = 0; + private int dimensionIndices = 0; private TimeSeriesParams.MetricType metricType; - private boolean metricTypeIsSet; - private List indiceList; - private Map> meta; + private boolean hasConflictMetricType; + private final List indiceList; + private final Map> meta; Builder(String name, String type) { this.name = name; this.type = type; - this.isSearchable = true; - this.isAggregatable = true; - this.isDimension 
= true; this.metricType = null; - this.metricTypeIsSet = false; + this.hasConflictMetricType = false; this.indiceList = new ArrayList<>(); this.meta = new HashMap<>(); } @@ -508,81 +506,101 @@ void add( TimeSeriesParams.MetricType metricType, Map meta ) { - IndexCaps indexCaps = new IndexCaps(index, search, agg, isDimension, metricType); - indiceList.add(indexCaps); - this.isSearchable &= search; - this.isAggregatable &= agg; + assert indiceList.isEmpty() || indiceList.get(indiceList.size() - 1).name.compareTo(index) < 0 + : "indices aren't sorted; previous [" + indiceList.get(indiceList.size() - 1).name + "], current [" + index + "]"; + if (search) { + searchableIndices++; + } + if (agg) { + aggregatableIndices++; + } + if (isDimension) { + dimensionIndices++; + } this.isMetadataField |= isMetadataField; - this.isDimension &= isDimension; // If we have a discrepancy in metric types, or in some indices this field is not marked as a metric field - we will // treat it as a non-metric field and report this discrepancy in metricConflictsIndices - if (this.metricTypeIsSet) { - if (this.metricType != metricType) { - this.metricType = null; - } - } else { - this.metricTypeIsSet = true; + if (indiceList.isEmpty()) { this.metricType = metricType; + } else if (this.metricType != metricType) { + hasConflictMetricType = true; + this.metricType = null; } + IndexCaps indexCaps = new IndexCaps(index, search, agg, isDimension, metricType); + indiceList.add(indexCaps); for (Map.Entry entry : meta.entrySet()) { this.meta.computeIfAbsent(entry.getKey(), key -> new HashSet<>()).add(entry.getValue()); } } - List getIndices() { - return indiceList.stream().map(c -> c.name).collect(Collectors.toList()); + void getIndices(Collection indices) { + indiceList.forEach(cap -> indices.add(cap.name)); } FieldCapabilities build(boolean withIndices) { final String[] indices; - Collections.sort(indiceList, Comparator.comparing(o -> o.name)); if (withIndices) { indices = indiceList.stream().map(caps -> caps.name).toArray(String[]::new); } else { indices = null; } + // Iff this field is searchable in some indices AND non-searchable in others + // we record the list of non-searchable indices + final boolean isSearchable = searchableIndices == indiceList.size(); final String[] nonSearchableIndices; - if (isSearchable == false && indiceList.stream().anyMatch((caps) -> caps.isSearchable)) { - // Iff this field is searchable in some indices AND non-searchable in others - // we record the list of non-searchable indices - nonSearchableIndices = indiceList.stream() - .filter((caps) -> caps.isSearchable == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isSearchable || searchableIndices == 0) { nonSearchableIndices = null; + } else { + nonSearchableIndices = new String[indiceList.size() - searchableIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isSearchable == false) { + nonSearchableIndices[index++] = indexCaps.name; + } + } } + // Iff this field is aggregatable in some indices AND non-aggregatable in others + // we keep the list of non-aggregatable indices + final boolean isAggregatable = aggregatableIndices == indiceList.size(); final String[] nonAggregatableIndices; - if (isAggregatable == false && indiceList.stream().anyMatch((caps) -> caps.isAggregatable)) { - // Iff this field is aggregatable in some indices AND non-searchable in others - // we keep the list of non-aggregatable indices - nonAggregatableIndices = indiceList.stream() - .filter((caps) ->
caps.isAggregatable == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isAggregatable || aggregatableIndices == 0) { nonAggregatableIndices = null; + } else { + nonAggregatableIndices = new String[indiceList.size() - aggregatableIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isAggregatable == false) { + nonAggregatableIndices[index++] = indexCaps.name; + } + } } + // Collect all indices that have dimension == false if this field is marked as a dimension in at least one index + final boolean isDimension = dimensionIndices == indiceList.size(); final String[] nonDimensionIndices; - if (isDimension == false && indiceList.stream().anyMatch((caps) -> caps.isDimension)) { - // Collect all indices that have dimension == false if this field is marked as a dimension in at least one index - nonDimensionIndices = indiceList.stream() - .filter((caps) -> caps.isDimension == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isDimension || dimensionIndices == 0) { nonDimensionIndices = null; + } else { + nonDimensionIndices = new String[indiceList.size() - dimensionIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isDimension == false) { + nonDimensionIndices[index++] = indexCaps.name; + } + } } final String[] metricConflictsIndices; - if (indiceList.stream().anyMatch((caps) -> caps.metricType != metricType)) { + if (hasConflictMetricType) { // Collect all indices that have this field. If it is marked differently in different indices, we cannot really // make a decision about which index is "right" and which index is "wrong" so collecting all indices where this field // is present is probably the only sensible thing to do here - metricConflictsIndices = indiceList.stream().map(caps -> caps.name).toArray(String[]::new); + metricConflictsIndices = Objects.requireNonNullElseGet( + indices, + () -> indiceList.stream().map(caps -> caps.name).toArray(String[]::new) + ); } else { metricConflictsIndices = null; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 7eac2f54c1137..2926b277ba52e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MappedFieldType; @@ -37,6 +38,7 @@ */ class FieldCapabilitiesFetcher { private final IndicesService indicesService; + private final Map> indexMappingHashToResponses = new HashMap<>(); FieldCapabilitiesFetcher(IndicesService indicesService) { this.indicesService = indicesService; @@ -45,6 +47,8 @@ class FieldCapabilitiesFetcher { FieldCapabilitiesIndexResponse fetch( ShardId shardId, String[] fieldPatterns, + String[] filters, + String[] fieldTypes, QueryBuilder indexFilter, long nowInMillis, Map runtimeFields @@ -63,70 +67,112 @@ FieldCapabilitiesIndexResponse fetch( ); if (canMatchShard(shardId, indexFilter, nowInMillis, searchExecutionContext) == false) { - return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), Collections.emptyMap(), false); + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), null,
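Aside: the Builder rewrite above swaps boolean AND-folding for per-capability counters. A capability holds for the merged field iff its counter equals the total number of indices, and the per-index exception array is only materialized in the mixed case. A standalone sketch of that counting trick, shown for the searchable capability only (CapabilityCounter and its members are invented for the example):

import java.util.ArrayList;
import java.util.List;

class CapabilityCounter {
    record IndexCaps(String name, boolean searchable) {}

    private final List<IndexCaps> indices = new ArrayList<>();
    private int searchableCount;

    void add(String index, boolean searchable) {
        if (searchable) {
            searchableCount++;
        }
        indices.add(new IndexCaps(index, searchable));
    }

    // null means "nothing to report": the field is searchable everywhere or nowhere
    String[] nonSearchableIndices() {
        if (searchableCount == indices.size() || searchableCount == 0) {
            return null;
        }
        // mixed case: size is known up front, so no streams or intermediate lists needed
        String[] result = new String[indices.size() - searchableCount];
        int i = 0;
        for (IndexCaps caps : indices) {
            if (caps.searchable() == false) {
                result[i++] = caps.name();
            }
        }
        return result;
    }
}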
Collections.emptyMap(), false); } - Set fieldNames = new HashSet<>(); - for (String pattern : fieldPatterns) { - fieldNames.addAll(searchExecutionContext.getMatchingFieldNames(pattern)); + final MappingMetadata mapping = indexService.getMetadata().mapping(); + final String indexMappingHash = mapping != null ? mapping.getSha256() : null; + if (indexMappingHash != null) { + final Map existing = indexMappingHashToResponses.get(indexMappingHash); + if (existing != null) { + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, existing, true); + } } Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); - Map responseMap = new HashMap<>(); - for (String field : fieldNames) { - MappedFieldType ft = searchExecutionContext.getFieldType(field); - boolean isMetadataField = searchExecutionContext.isMetadataField(field); - if (isMetadataField || fieldPredicate.test(ft.name())) { - IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( - field, - ft.familyTypeName(), - isMetadataField, - ft.isSearchable(), - ft.isAggregatable(), - ft.isDimension(), - ft.getMetricType(), - ft.meta() - ); - responseMap.put(field, fieldCap); - } else { - continue; - } + final Map responseMap = retrieveFieldCaps( + searchExecutionContext, + fieldPatterns, + filters, + fieldTypes, + fieldPredicate + ); + if (indexMappingHash != null) { + indexMappingHashToResponses.put(indexMappingHash, responseMap); + } + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, responseMap, true); + } + } + + static Map retrieveFieldCaps( + SearchExecutionContext context, + String[] fieldPatterns, + String[] filters, + String[] types, + Predicate indexFieldfilter + ) { + + Set fieldNames = new HashSet<>(); + for (String pattern : fieldPatterns) { + fieldNames.addAll(context.getMatchingFieldNames(pattern)); + } + + boolean includeParentObjects = checkIncludeParents(filters); - // Check the ancestor of the field to find nested and object fields. - // Runtime fields are excluded since they can override any path. - // TODO find a way to do this that does not require an instanceof check - if (ft instanceof RuntimeField == false) { - int dotIndex = ft.name().lastIndexOf('.'); - while (dotIndex > -1) { - String parentField = ft.name().substring(0, dotIndex); - if (responseMap.containsKey(parentField)) { - // we added this path on another field already - break; - } - // checks if the parent field contains sub-fields - if (searchExecutionContext.getFieldType(parentField) == null) { - // no field type, it must be an object field - String type = searchExecutionContext.nestedLookup().getNestedMappers().get(parentField) != null - ? 
"nested" - : "object"; - IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( - parentField, - type, - false, - false, - false, - false, - null, - Collections.emptyMap() - ); - responseMap.put(parentField, fieldCap); - } - dotIndex = parentField.lastIndexOf('.'); + FieldCapsFilter filter = buildFilter(indexFieldfilter, filters, types); + Map responseMap = new HashMap<>(); + for (String field : fieldNames) { + MappedFieldType ft = context.getFieldType(field); + if (filter.matches(ft, context)) { + IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + field, + ft.familyTypeName(), + context.isMetadataField(field), + ft.isSearchable(), + ft.isAggregatable(), + ft.isDimension(), + ft.getMetricType(), + ft.meta() + ); + responseMap.put(field, fieldCap); + } else { + continue; + } + + // Check the ancestor of the field to find nested and object fields. + // Runtime fields are excluded since they can override any path. + // TODO find a way to do this that does not require an instanceof check + if (ft instanceof RuntimeField == false && includeParentObjects) { + int dotIndex = ft.name().lastIndexOf('.'); + while (dotIndex > -1) { + String parentField = ft.name().substring(0, dotIndex); + if (responseMap.containsKey(parentField)) { + // we added this path on another field already + break; + } + // checks if the parent field contains sub-fields + if (context.getFieldType(parentField) == null) { + // no field type, it must be an object field + String type = context.nestedLookup().getNestedMappers().get(parentField) != null ? "nested" : "object"; + IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + parentField, + type, + false, + false, + false, + false, + null, + Collections.emptyMap() + ); + responseMap.put(parentField, fieldCap); } + dotIndex = parentField.lastIndexOf('.'); } } - return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), responseMap, true); } + return responseMap; + } + + private static boolean checkIncludeParents(String[] filters) { + for (String filter : filters) { + if ("-parent".equals(filter)) { + return false; + } + if ("parent".equals(filter)) { + return true; + } + } + return true; } private boolean canMatchShard( @@ -144,4 +190,35 @@ private boolean canMatchShard( return SearchService.queryStillMatchesAfterRewrite(searchRequest, searchExecutionContext); } + private interface FieldCapsFilter { + boolean matches(MappedFieldType fieldType, SearchExecutionContext context); + + default FieldCapsFilter and(FieldCapsFilter other) { + return (ft, context) -> matches(ft, context) && other.matches(ft, context); + } + } + + private static FieldCapsFilter buildFilter(Predicate fieldFilter, String[] filters, String[] fieldTypes) { + // security filters don't exclude metadata fields + FieldCapsFilter fcf = (ft, c) -> fieldFilter.test(ft.name()) || c.isMetadataField(ft.name()); + if (fieldTypes.length > 0) { + Set acceptedTypes = Set.of(fieldTypes); + fcf = fcf.and((ft, c) -> acceptedTypes.contains(ft.familyTypeName())); + } + for (String filter : filters) { + if ("parent".equals(filter) || "-parent".equals(filter)) { + continue; + } + FieldCapsFilter next = switch (filter) { + case "+metadata" -> (ft, c) -> c.isMetadataField(ft.name()); + case "-metadata" -> (ft, c) -> c.isMetadataField(ft.name()) == false; + case "-nested" -> (ft, c) -> c.nestedLookup().getNestedParent(ft.name()) == null; + case "-multifield" -> (ft, c) -> c.isMultiField(ft.name()) == false; + default -> throw new IllegalArgumentException("Unknown field caps filter [" + filter + 
"]"); + }; + fcf = fcf.and(next); + } + return fcf; + } + } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index 397018208202b..3d03cfc92e1e2 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -9,34 +9,113 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +final class FieldCapabilitiesIndexResponse implements Writeable { + private static final Version MAPPING_HASH_VERSION = Version.V_8_2_0; -public class FieldCapabilitiesIndexResponse extends ActionResponse implements Writeable { private final String indexName; + @Nullable + private final String indexMappingHash; private final Map responseMap; private final boolean canMatch; private final transient Version originVersion; - FieldCapabilitiesIndexResponse(String indexName, Map responseMap, boolean canMatch) { + FieldCapabilitiesIndexResponse( + String indexName, + @Nullable String indexMappingHash, + Map responseMap, + boolean canMatch + ) { this.indexName = indexName; + this.indexMappingHash = indexMappingHash; this.responseMap = responseMap; this.canMatch = canMatch; this.originVersion = Version.CURRENT; } FieldCapabilitiesIndexResponse(StreamInput in) throws IOException { - super(in); this.indexName = in.readString(); this.responseMap = in.readMap(StreamInput::readString, IndexFieldCapabilities::new); this.canMatch = in.readBoolean(); this.originVersion = in.getVersion(); + if (in.getVersion().onOrAfter(MAPPING_HASH_VERSION)) { + this.indexMappingHash = in.readOptionalString(); + } else { + this.indexMappingHash = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(indexName); + out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); + out.writeBoolean(canMatch); + if (out.getVersion().onOrAfter(MAPPING_HASH_VERSION)) { + out.writeOptionalString(indexMappingHash); + } + } + + private record GroupByMappingHash(List indices, String indexMappingHash, Map responseMap) + implements + Writeable { + GroupByMappingHash(StreamInput in) throws IOException { + this(in.readStringList(), in.readString(), in.readMap(StreamInput::readString, IndexFieldCapabilities::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(indices); + out.writeString(indexMappingHash); + out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); + } + + List getResponses() { + return indices.stream().map(index -> new FieldCapabilitiesIndexResponse(index, indexMappingHash, responseMap, true)).toList(); + } + } + + static List readList(StreamInput input) throws IOException { + if (input.getVersion().before(MAPPING_HASH_VERSION)) { + return input.readList(FieldCapabilitiesIndexResponse::new); + } + final 
List ungroupedList = input.readList(FieldCapabilitiesIndexResponse::new); + final List groups = input.readList(GroupByMappingHash::new); + return Stream.concat(ungroupedList.stream(), groups.stream().flatMap(g -> g.getResponses().stream())).toList(); + } + + static void writeList(StreamOutput output, List responses) throws IOException { + if (output.getVersion().before(MAPPING_HASH_VERSION)) { + output.writeCollection(responses); + return; + } + final Predicate canGroup = r -> r.canMatch && r.indexMappingHash != null; + final List ungroupedResponses = responses.stream().filter(r -> canGroup.test(r) == false).toList(); + final List groupedResponses = responses.stream() + .filter(canGroup) + .collect(Collectors.groupingBy(r -> r.indexMappingHash)) + .values() + .stream() + .map(rs -> { + final String indexMappingHash = rs.get(0).indexMappingHash; + final Map responseMap = rs.get(0).responseMap; + final List indices = rs.stream().map(r -> r.indexName).toList(); + return new GroupByMappingHash(indices, indexMappingHash, responseMap); + }) + .toList(); + output.writeList(ungroupedResponses); + output.writeList(groupedResponses); } /** @@ -46,6 +125,14 @@ public String getIndexName() { return indexName; } + /** + * Returns the index mapping hash associated with this index if exists + */ + @Nullable + public String getIndexMappingHash() { + return indexMappingHash; + } + public boolean canMatch() { return canMatch; } @@ -69,23 +156,19 @@ Version getOriginVersion() { return originVersion; } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(indexName); - out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); - out.writeBoolean(canMatch); - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; FieldCapabilitiesIndexResponse that = (FieldCapabilitiesIndexResponse) o; - return canMatch == that.canMatch && Objects.equals(indexName, that.indexName) && Objects.equals(responseMap, that.responseMap); + return canMatch == that.canMatch + && Objects.equals(indexName, that.indexName) + && Objects.equals(indexMappingHash, that.indexMappingHash) + && Objects.equals(responseMap, that.responseMap); } @Override public int hashCode() { - return Objects.hash(indexName, responseMap, canMatch); + return Objects.hash(indexName, indexMappingHash, responseMap, canMatch); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java index a35ed069c63b9..6280b0266ba4c 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java @@ -8,11 +8,13 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -28,6 +30,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque private final 
List shardIds; private final String[] fields; + private final String[] filters; + private final String[] allowedTypes; private final OriginalIndices originalIndices; private final QueryBuilder indexFilter; private final long nowInMillis; @@ -37,6 +41,13 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque super(in); shardIds = in.readList(ShardId::new); fields = in.readStringArray(); + if (in.getVersion().onOrAfter(Version.V_8_2_0)) { + filters = in.readStringArray(); + allowedTypes = in.readStringArray(); + } else { + filters = Strings.EMPTY_ARRAY; + allowedTypes = Strings.EMPTY_ARRAY; + } originalIndices = OriginalIndices.readOriginalIndices(in); indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readLong(); @@ -46,6 +57,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque FieldCapabilitiesNodeRequest( List shardIds, String[] fields, + String[] filters, + String[] allowedTypes, OriginalIndices originalIndices, QueryBuilder indexFilter, long nowInMillis, @@ -53,6 +66,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque ) { this.shardIds = Objects.requireNonNull(shardIds); this.fields = fields; + this.filters = filters; + this.allowedTypes = allowedTypes; this.originalIndices = originalIndices; this.indexFilter = indexFilter; this.nowInMillis = nowInMillis; @@ -63,6 +78,14 @@ public String[] fields() { return fields; } + public String[] filters() { + return filters; + } + + public String[] allowedTypes() { + return allowedTypes; + } + public OriginalIndices originalIndices() { return originalIndices; } @@ -98,6 +121,10 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeList(shardIds); out.writeStringArray(fields); + if (out.getVersion().onOrAfter(Version.V_8_2_0)) { + out.writeStringArray(filters); + out.writeStringArray(allowedTypes); + } OriginalIndices.writeOriginalIndices(originalIndices, out); out.writeOptionalNamedWriteable(indexFilter); out.writeLong(nowInMillis); @@ -117,6 +144,8 @@ public boolean equals(Object o) { return nowInMillis == that.nowInMillis && shardIds.equals(that.shardIds) && Arrays.equals(fields, that.fields) + && Arrays.equals(filters, that.filters) + && Arrays.equals(allowedTypes, that.allowedTypes) && Objects.equals(originalIndices, that.originalIndices) && Objects.equals(indexFilter, that.indexFilter) && Objects.equals(runtimeFields, that.runtimeFields); @@ -127,6 +156,8 @@ public int hashCode() { int result = Objects.hash(originalIndices, indexFilter, nowInMillis, runtimeFields); result = 31 * result + shardIds.hashCode(); result = 31 * result + Arrays.hashCode(fields); + result = 31 * result + Arrays.hashCode(filters); + result = 31 * result + Arrays.hashCode(allowedTypes); return result; } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java index 6d103fbe863cc..91f079cadbd99 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java @@ -37,14 +37,14 @@ class FieldCapabilitiesNodeResponse extends ActionResponse implements Writeable FieldCapabilitiesNodeResponse(StreamInput in) throws IOException { super(in); - this.indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); + this.indexResponses = 
FieldCapabilitiesIndexResponse.readList(in); this.failures = in.readMap(ShardId::new, StreamInput::readException); this.unmatchedShardIds = in.readSet(ShardId::new); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeList(indexResponses); + FieldCapabilitiesIndexResponse.writeList(out, indexResponses); out.writeMap(failures, (o, v) -> v.writeTo(o), StreamOutput::writeException); out.writeCollection(unmatchedShardIds); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index f66ef34972385..0397f61515fd7 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; @@ -35,6 +36,8 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements Ind private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; private String[] fields = Strings.EMPTY_ARRAY; + private String[] filters = Strings.EMPTY_ARRAY; + private String[] allowedTypes = Strings.EMPTY_ARRAY; private boolean includeUnmapped = false; // pkg private API mainly for cross cluster search to signal that we do multiple reductions ie. the results should not be merged private boolean mergeResults = true; @@ -52,6 +55,10 @@ public FieldCapabilitiesRequest(StreamInput in) throws IOException { indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readOptionalLong(); runtimeFields = in.readMap(); + if (in.getVersion().onOrAfter(Version.V_8_2_0)) { + filters = in.readStringArray(); + allowedTypes = in.readStringArray(); + } } public FieldCapabilitiesRequest() {} @@ -86,6 +93,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(indexFilter); out.writeOptionalLong(nowInMillis); out.writeMap(runtimeFields); + if (out.getVersion().onOrAfter(Version.V_8_2_0)) { + out.writeStringArray(filters); + out.writeStringArray(allowedTypes); + } } @Override @@ -117,6 +128,24 @@ public String[] fields() { return fields; } + public FieldCapabilitiesRequest filters(String... filters) { + this.filters = filters; + return this; + } + + public String[] filters() { + return filters; + } + + public FieldCapabilitiesRequest allowedTypes(String... 
types) { + this.allowedTypes = types; + return this; + } + + public String[] allowedTypes() { + return allowedTypes; + } + /** * The list of indices to lookup */ @@ -213,6 +242,8 @@ public boolean equals(Object o) { && Arrays.equals(fields, that.fields) && Objects.equals(indexFilter, that.indexFilter) && Objects.equals(nowInMillis, that.nowInMillis) + && Arrays.equals(filters, that.filters) + && Arrays.equals(allowedTypes, that.allowedTypes) && Objects.equals(runtimeFields, that.runtimeFields); } @@ -221,6 +252,8 @@ public int hashCode() { int result = Objects.hash(indicesOptions, includeUnmapped, mergeResults, indexFilter, nowInMillis, runtimeFields); result = 31 * result + Arrays.hashCode(indices); result = 31 * result + Arrays.hashCode(fields); + result = 31 * result + Arrays.hashCode(filters); + result = 31 * result + Arrays.hashCode(allowedTypes); return result; } @@ -230,6 +263,10 @@ public String getDescription() { Strings.collectionToDelimitedStringWithLimit(Arrays.asList(indices), ",", "", "", 1024, stringBuilder); stringBuilder.append("], fields["); Strings.collectionToDelimitedStringWithLimit(Arrays.asList(fields), ",", "", "", 1024, stringBuilder); + stringBuilder.append("], filters["); + stringBuilder.append(Strings.collectionToDelimitedString(Arrays.asList(filters), ",")); + stringBuilder.append("], types["); + stringBuilder.append(Strings.collectionToDelimitedString(Arrays.asList(allowedTypes), ",")); stringBuilder.append("]"); return stringBuilder.toString(); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index 7e14fb667c96e..dba604db2faf2 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -75,7 +75,7 @@ public FieldCapabilitiesResponse(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); this.responseMap = in.readMap(StreamInput::readString, FieldCapabilitiesResponse::readField); - indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); + this.indexResponses = FieldCapabilitiesIndexResponse.readList(in); this.failures = in.readList(FieldCapabilitiesFailure::new); } @@ -141,7 +141,7 @@ private static Map readField(StreamInput in) throws I public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(indices); out.writeMap(responseMap, StreamOutput::writeString, FieldCapabilitiesResponse::writeField); - out.writeList(indexResponses); + FieldCapabilitiesIndexResponse.writeList(out, indexResponses); out.writeList(failures); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java index 674a8e7da8456..6cefa39f7f6ab 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java @@ -35,6 +35,19 @@ public class IndexFieldCapabilities implements Writeable { private final TimeSeriesParams.MetricType metricType; private final Map meta; + public static IndexFieldCapabilities withMetadata(IndexFieldCapabilities input, boolean isMetadata) { + return new IndexFieldCapabilities( + input.getName(), + input.getType(), + isMetadata, + input.isSearchable, + input.isAggregatable, + input.isDimension, + 
input.metricType, + input.meta + ); + } + /** * @param name The name of the field. * @param type The type associated with the field. diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java index dc2b99afa46cd..f9d5cff2471b4 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java @@ -174,6 +174,8 @@ private void sendRequestToNode(String nodeId, List shardIds) { final FieldCapabilitiesNodeRequest nodeRequest = new FieldCapabilitiesNodeRequest( shardIds, fieldCapsRequest.fields(), + fieldCapsRequest.filters(), + fieldCapsRequest.allowedTypes(), originalIndices, fieldCapsRequest.indexFilter(), nowInMillis, diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java new file mode 100644 index 0000000000000..59c1caa80c2e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.Version; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +final class ResponseRewriter { + + public static Map rewriteOldResponses( + Version version, + Map input, + String[] filters, + String[] allowedTypes, + Predicate isMetadata + ) { + if (version.onOrAfter(Version.V_8_1_0)) { + return input; // nothing needs to be done + } + Function transformer = buildTransformer( + version, + input, + filters, + allowedTypes, + isMetadata + ); + Map rewritten = new HashMap<>(); + for (var entry : input.entrySet()) { + IndexFieldCapabilities fc = transformer.apply(entry.getValue()); + if (fc != null) { + rewritten.put(entry.getKey(), fc); + } + } + return rewritten; + } + + private static Function buildTransformer( + Version version, + Map input, + String[] filters, + String[] allowedTypes, + Predicate isMetadata + ) { + boolean checkMetadata = version.before(Version.V_7_13_0); + Predicate test = ifc -> true; + Set objects = null; + Set nestedObjects = null; + if (allowedTypes.length > 0) { + Set at = Set.of(allowedTypes); + test = test.and(ifc -> at.contains(ifc.getType())); + } + for (String filter : filters) { + if ("-parent".equals(filter)) { + test = test.and(fc -> fc.getType().equals("nested") == false && fc.getType().equals("object") == false); + } + if ("-metadata".equals(filter)) { + test = test.and(fc -> fc.isMetadatafield() == false); + } + if ("+metadata".equals(filter)) { + test = test.and(IndexFieldCapabilities::isMetadatafield); + } + if ("-nested".equals(filter)) { + if (nestedObjects == null) { + nestedObjects = findTypes("nested", input); + } + Set no = nestedObjects; + test = test.and(fc -> isNestedField(fc.getName(), no) == false); + } + if ("-multifield".equals(filter)) { + // immediate parent is not an object field + if (objects == null) { + 
objects = findTypes("object", input); + } + Set o = objects; + test = test.and(fc -> isNotMultifield(fc.getName(), o)); + } + } + Predicate finalTest = test; + return fc -> { + IndexFieldCapabilities rewritten = fc; + if (checkMetadata) { + rewritten = IndexFieldCapabilities.withMetadata(fc, isMetadata.test(fc.getName())); + } + if (finalTest.test(rewritten) == false) { + return null; + } + return rewritten; + }; + } + + private static Set findTypes(String type, Map fieldCaps) { + return fieldCaps.entrySet() + .stream() + .filter(entry -> type.equals(entry.getValue().getType())) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + } + + private static boolean isNestedField(String field, Set nestedParents) { + for (String parent : nestedParents) { + if (field.startsWith(parent + ".") || field.equals(parent)) { + return true; + } + } + return false; + } + + private static boolean isNotMultifield(String field, Set objectFields) { + int lastDotPos = field.lastIndexOf("."); + if (lastDotPos == -1) { + return true; + } + String parent = field.substring(0, lastDotPos); + return objectFields.contains(parent); + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 71e4e19c4de1f..a12bef8aa74d4 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; @@ -36,13 +35,14 @@ import org.elasticsearch.transport.TransportService; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -56,8 +56,8 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction metadataFieldPred; + private final IndicesService indicesService; private final boolean ccsCheckCompatibility; @Inject @@ -74,7 +74,7 @@ public TransportFieldCapabilitiesAction( this.transportService = transportService; this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; - this.fieldCapabilitiesFetcher = new FieldCapabilitiesFetcher(indicesService); + this.indicesService = indicesService; final Set metadataFields = indicesService.getAllMetadataFields(); this.metadataFieldPred = metadataFields::contains; transportService.registerRequestHandler( @@ -113,6 +113,17 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti checkIndexBlocks(clusterState, concreteIndices); final Map indexResponses = Collections.synchronizedMap(new HashMap<>()); + // This map is used to share the index response for indices which have the same index mapping hash to reduce the memory usage. 
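Aside: the handleIndexResponse consumer declared just below shares a single response map per distinct mapping hash, so indices with byte-identical mappings are held in memory only once. A toy sketch of that putIfAbsent sharing pattern (MappingHashCache and IndexResponse are invented stand-ins with simplified types; the first-writer-wins flow mirrors the code below):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class MappingHashCache {
    record IndexResponse(String index, String mappingHash, Map<String, String> fields) {}

    private final Map<String, Map<String, String>> byHash = new ConcurrentHashMap<>();

    IndexResponse dedupe(IndexResponse resp) {
        if (resp.mappingHash() == null) {
            return resp; // no hash, nothing to share on
        }
        // the first response for a given mapping hash wins; later ones reuse its map
        Map<String, String> existing = byHash.putIfAbsent(resp.mappingHash(), resp.fields());
        return existing == null ? resp : new IndexResponse(resp.index(), resp.mappingHash(), existing);
    }
}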
+ final Map> indexMappingHashToResponses = Collections.synchronizedMap(new HashMap<>()); + final Consumer handleIndexResponse = resp -> { + if (resp.canMatch() && resp.getIndexMappingHash() != null) { + Map curr = indexMappingHashToResponses.putIfAbsent(resp.getIndexMappingHash(), resp.get()); + if (curr != null) { + resp = new FieldCapabilitiesIndexResponse(resp.getIndexName(), resp.getIndexMappingHash(), curr, true); + } + } + indexResponses.putIfAbsent(resp.getIndexName(), resp); + }; final FailureCollector indexFailures = new FailureCollector(); // One for each cluster including the local cluster final CountDown completionCounter = new CountDown(1 + remoteClusterIndices.size()); @@ -126,7 +137,7 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti nowInMillis, concreteIndices, threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION), - indexResponse -> indexResponses.putIfAbsent(indexResponse.getIndexName(), indexResponse), + handleIndexResponse, indexFailures::collect, countDown ); @@ -142,7 +153,9 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti remoteClusterClient.fieldCaps(remoteRequest, ActionListener.wrap(response -> { for (FieldCapabilitiesIndexResponse resp : response.getIndexResponses()) { String indexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, resp.getIndexName()); - indexResponses.putIfAbsent(indexName, new FieldCapabilitiesIndexResponse(indexName, resp.get(), resp.canMatch())); + handleIndexResponse.accept( + new FieldCapabilitiesIndexResponse(indexName, resp.getIndexMappingHash(), resp.get(), resp.canMatch()) + ); } for (FieldCapabilitiesFailure failure : response.getFailures()) { Exception ex = failure.getException(); @@ -178,12 +191,7 @@ private Runnable createResponseMerger( // fork off to the management pool for merging the responses as the operation can run for longer than is acceptable // on a transport thread in case of large numbers of indices and/or fields threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION) - .submit( - ActionRunnable.supply( - listener, - () -> merge(indexResponses, request.includeUnmapped(), new ArrayList<>(failures)) - ) - ); + .submit(ActionRunnable.supply(listener, () -> merge(indexResponses, request, new ArrayList<>(failures)))); } else { listener.onResponse( new FieldCapabilitiesResponse(new ArrayList<>(indexResponses.values()), new ArrayList<>(failures)) @@ -212,6 +220,8 @@ private static FieldCapabilitiesRequest prepareRemoteRequest( remoteRequest.indicesOptions(originalIndices.indicesOptions()); remoteRequest.indices(originalIndices.indices()); remoteRequest.fields(request.fields()); + remoteRequest.filters(request.filters()); + remoteRequest.allowedTypes(request.allowedTypes()); remoteRequest.runtimeFields(request.runtimeFields()); remoteRequest.indexFilter(request.indexFilter()); remoteRequest.nowInMillis(nowInMillis); @@ -219,19 +229,23 @@ private static FieldCapabilitiesRequest prepareRemoteRequest( } private FieldCapabilitiesResponse merge( - Map indexResponses, - boolean includeUnmapped, + Map indexResponsesMap, + FieldCapabilitiesRequest request, List failures ) { - String[] indices = indexResponses.keySet().stream().sorted().toArray(String[]::new); + final List indexResponses = indexResponsesMap.values() + .stream() + .sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)) + .toList(); + final String[] indices = indexResponses.stream().map(FieldCapabilitiesIndexResponse::getIndexName).toArray(String[]::new); final Map> 
responseMapBuilder = new HashMap<>(); - for (FieldCapabilitiesIndexResponse response : indexResponses.values()) { - innerMerge(responseMapBuilder, response); + for (FieldCapabilitiesIndexResponse response : indexResponses) { + innerMerge(responseMapBuilder, request, response); } final Map> responseMap = new HashMap<>(); for (Map.Entry> entry : responseMapBuilder.entrySet()) { final Map typeMapBuilder = entry.getValue(); - if (includeUnmapped) { + if (request.includeUnmapped()) { addUnmappedFields(indices, entry.getKey(), typeMapBuilder); } boolean multiTypes = typeMapBuilder.size() > 1; @@ -245,27 +259,33 @@ private FieldCapabilitiesResponse merge( } private void addUnmappedFields(String[] indices, String field, Map typeMap) { - Set unmappedIndices = new HashSet<>(Arrays.asList(indices)); - typeMap.values().forEach((b) -> b.getIndices().forEach(unmappedIndices::remove)); - if (unmappedIndices.isEmpty() == false) { - FieldCapabilities.Builder unmapped = new FieldCapabilities.Builder(field, "unmapped"); - typeMap.put("unmapped", unmapped); - for (String index : unmappedIndices) { - unmapped.add(index, false, false, false, false, null, Collections.emptyMap()); + final Set mappedIndices = new HashSet<>(); + typeMap.values().forEach(t -> t.getIndices(mappedIndices)); + if (mappedIndices.size() != indices.length) { + final FieldCapabilities.Builder unmapped = new FieldCapabilities.Builder(field, "unmapped"); + for (String index : indices) { + if (mappedIndices.contains(index) == false) { + unmapped.add(index, false, false, false, false, null, Collections.emptyMap()); + } } + typeMap.put("unmapped", unmapped); } } private void innerMerge( Map> responseMapBuilder, + FieldCapabilitiesRequest request, FieldCapabilitiesIndexResponse response ) { - for (Map.Entry entry : response.get().entrySet()) { + Map fields = ResponseRewriter.rewriteOldResponses( + response.getOriginVersion(), + response.get(), + request.filters(), + request.allowedTypes(), + metadataFieldPred + ); + for (Map.Entry entry : fields.entrySet()) { final String field = entry.getKey(); - // best effort to detect metadata field coming from older nodes - final boolean isMetadataField = response.getOriginVersion().onOrAfter(Version.V_7_13_0) - ? 
entry.getValue().isMetadatafield() - : metadataFieldPred.test(field); final IndexFieldCapabilities fieldCap = entry.getValue(); Map typeMap = responseMapBuilder.computeIfAbsent(field, f -> new HashMap<>()); FieldCapabilities.Builder builder = typeMap.computeIfAbsent( @@ -274,7 +294,7 @@ private void innerMerge( ); builder.add( response.getIndexName(), - isMetadataField, + fieldCap.isMetadatafield(), fieldCap.isSearchable(), fieldCap.isAggregatable(), fieldCap.isDimension(), @@ -341,14 +361,17 @@ public void messageReceived(FieldCapabilitiesNodeRequest request, TransportChann final Map> groupedShardIds = request.shardIds() .stream() .collect(Collectors.groupingBy(ShardId::getIndexName)); + final FieldCapabilitiesFetcher fetcher = new FieldCapabilitiesFetcher(indicesService); for (List shardIds : groupedShardIds.values()) { final Map failures = new HashMap<>(); final Set unmatched = new HashSet<>(); for (ShardId shardId : shardIds) { try { - final FieldCapabilitiesIndexResponse response = fieldCapabilitiesFetcher.fetch( + final FieldCapabilitiesIndexResponse response = fetcher.fetch( shardId, request.fields(), + request.filters(), + request.allowedTypes(), request.indexFilter(), request.nowInMillis(), request.runtimeFields() diff --git a/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java b/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java new file mode 100644 index 0000000000000..aafd6166cb364 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import java.util.Collection; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; + +/** + * This class tracks the intermediate responses that will be used to create the aggregated cluster response to a request. It also + * makes it possible to discard the intermediate results when asked, for example when the initial request is cancelled, in order to + * release resources. + */ +public class NodeResponseTracker { + + private final AtomicInteger counter = new AtomicInteger(); + private final int expectedResponsesCount; + private volatile AtomicReferenceArray responses; + private volatile Exception causeOfDiscarding; + + public NodeResponseTracker(int size) { + this.expectedResponsesCount = size; + this.responses = new AtomicReferenceArray<>(size); + } + + public NodeResponseTracker(Collection array) { + this.expectedResponsesCount = array.size(); + this.responses = new AtomicReferenceArray<>(array.toArray()); + } + + /** + * This method discards the results collected so far to free up resources. + * @param cause the cause of the discarding; it will be communicated to callers that try to access the discarded results + */ + public void discardIntermediateResponses(Exception cause) { + if (responses != null) { + this.causeOfDiscarding = cause; + responses = null; + } + } + + public boolean responsesDiscarded() { + return responses == null; + } + + /** + * This method stores a new node response if the intermediate responses haven't been discarded yet.
If the responses are not discarded + * the method asserts that this is the first response encountered from this node to protect from miscounting the responses in case of a + * double invocation. If the responses have been discarded we accept this risk for simplicity. + * @param nodeIndex, the index that represents a single node of the cluster + * @param response, a response can be either a NodeResponse or an error + * @return true if all the nodes' responses have been received, else false + */ + public boolean trackResponseAndCheckIfLast(int nodeIndex, Object response) { + AtomicReferenceArray responses = this.responses; + + if (responsesDiscarded() == false) { + boolean firstEncounter = responses.compareAndSet(nodeIndex, null, response); + assert firstEncounter : "a response should be tracked only once"; + } + return counter.incrementAndGet() == getExpectedResponseCount(); + } + + /** + * Returns the tracked response or null if the response hasn't been received yet for a specific index that represents a node of the + * cluster. + * @throws DiscardedResponsesException if the responses have been discarded + */ + public Object getResponse(int nodeIndex) throws DiscardedResponsesException { + AtomicReferenceArray responses = this.responses; + if (responsesDiscarded()) { + throw new DiscardedResponsesException(causeOfDiscarding); + } + return responses.get(nodeIndex); + } + + public int getExpectedResponseCount() { + return expectedResponsesCount; + } + + /** + * This exception is thrown when the {@link NodeResponseTracker} is asked to give information about the responses after they have been + * discarded. + */ + public static class DiscardedResponsesException extends Exception { + + public DiscardedResponsesException(Exception cause) { + super(cause); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 5c5594aa094d6..382c9cf01693e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; @@ -51,7 +52,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Consumer; /** @@ -118,28 +118,29 @@ public TransportBroadcastByNodeAction( private Response newResponse( Request request, - AtomicReferenceArray responses, + NodeResponseTracker nodeResponseTracker, int unavailableShardCount, Map> nodes, ClusterState clusterState - ) { + ) throws NodeResponseTracker.DiscardedResponsesException { int totalShards = 0; int successfulShards = 0; List broadcastByNodeResponses = new ArrayList<>(); List exceptions = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - if (responses.get(i)instanceof FailedNodeException exception) { + for (int i = 0; i < 
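Aside: NodeResponseTracker, defined above, replaces the raw AtomicReferenceArray-plus-counter bookkeeping in the two transport actions that follow. A compact usage sketch under assumed inputs (the fan-out loop and string responses are invented; the tracker calls are the API defined above):

import org.elasticsearch.action.support.NodeResponseTracker;

class TrackerUsage {
    void fanOut(int nodeCount) throws NodeResponseTracker.DiscardedResponsesException {
        NodeResponseTracker tracker = new NodeResponseTracker(nodeCount);
        for (int i = 0; i < nodeCount; i++) {
            // each node slot is filled exactly once; the last arrival triggers completion
            if (tracker.trackResponseAndCheckIfLast(i, "response-" + i)) {
                onCompletion(tracker);
            }
        }
    }

    void onCompletion(NodeResponseTracker tracker) throws NodeResponseTracker.DiscardedResponsesException {
        for (int i = 0; i < tracker.getExpectedResponseCount(); i++) {
            // throws DiscardedResponsesException if the results were discarded, e.g. on cancellation
            Object response = tracker.getResponse(i);
            System.out.println(response);
        }
    }
}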
nodeResponseTracker.getExpectedResponseCount(); i++) { + Object response = nodeResponseTracker.getResponse(i); + if (response instanceof FailedNodeException exception) { totalShards += nodes.get(exception.nodeId()).size(); for (ShardRouting shard : nodes.get(exception.nodeId())) { exceptions.add(new DefaultShardOperationFailedException(shard.getIndexName(), shard.getId(), exception)); } } else { @SuppressWarnings("unchecked") - NodeResponse response = (NodeResponse) responses.get(i); - broadcastByNodeResponses.addAll(response.results); - totalShards += response.getTotalShards(); - successfulShards += response.getSuccessfulShards(); - for (BroadcastShardOperationFailedException throwable : response.getExceptions()) { + NodeResponse nodeResponse = (NodeResponse) response; + broadcastByNodeResponses.addAll(nodeResponse.results); + totalShards += nodeResponse.getTotalShards(); + successfulShards += nodeResponse.getSuccessfulShards(); + for (BroadcastShardOperationFailedException throwable : nodeResponse.getExceptions()) { if (TransportActions.isShardNotAvailableException(throwable) == false) { exceptions.add( new DefaultShardOperationFailedException( @@ -256,16 +257,15 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncAction(task, request, listener).start(); } - protected class AsyncAction { + protected class AsyncAction implements CancellableTask.CancellationListener { private final Task task; private final Request request; private final ActionListener listener; private final ClusterState clusterState; private final DiscoveryNodes nodes; private final Map> nodeIds; - private final AtomicReferenceArray responses; - private final AtomicInteger counter = new AtomicInteger(); private final int unavailableShardCount; + private final NodeResponseTracker nodeResponseTracker; protected AsyncAction(Task task, Request request, ActionListener listener) { this.task = task; @@ -312,10 +312,13 @@ protected AsyncAction(Task task, Request request, ActionListener liste } this.unavailableShardCount = unavailableShardCount; - responses = new AtomicReferenceArray<>(nodeIds.size()); + nodeResponseTracker = new NodeResponseTracker(nodeIds.size()); } public void start() { + if (task instanceof CancellableTask cancellableTask) { + cancellableTask.addListener(this); + } if (nodeIds.size() == 0) { try { onCompletion(); @@ -373,38 +376,34 @@ protected void onNodeResponse(DiscoveryNode node, int nodeIndex, NodeResponse re logger.trace("received response for [{}] from node [{}]", actionName, node.getId()); } - // this is defensive to protect against the possibility of double invocation - // the current implementation of TransportService#sendRequest guards against this - // but concurrency is hard, safety is important, and the small performance loss here does not matter - if (responses.compareAndSet(nodeIndex, null, response)) { - if (counter.incrementAndGet() == responses.length()) { - onCompletion(); - } + if (nodeResponseTracker.trackResponseAndCheckIfLast(nodeIndex, response)) { + onCompletion(); } } protected void onNodeFailure(DiscoveryNode node, int nodeIndex, Throwable t) { String nodeId = node.getId(); logger.debug(new ParameterizedMessage("failed to execute [{}] on node [{}]", actionName, nodeId), t); - - // this is defensive to protect against the possibility of double invocation - // the current implementation of TransportService#sendRequest guards against this - // but concurrency is hard, safety is important, and the small performance loss here does not matter - if 
(responses.compareAndSet(nodeIndex, null, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t))) { - if (counter.incrementAndGet() == responses.length()) { - onCompletion(); - } + if (nodeResponseTracker.trackResponseAndCheckIfLast( + nodeIndex, + new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t) + )) { + onCompletion(); } } protected void onCompletion() { - if (task instanceof CancellableTask && ((CancellableTask) task).notifyIfCancelled(listener)) { + if ((task instanceof CancellableTask t) && t.notifyIfCancelled(listener)) { return; } Response response = null; try { - response = newResponse(request, responses, unavailableShardCount, nodeIds, clusterState); + response = newResponse(request, nodeResponseTracker, unavailableShardCount, nodeIds, clusterState); + } catch (NodeResponseTracker.DiscardedResponsesException e) { + // We propagate the reason that the results were discarded, in this case the task cancellation, in case the + // listener needs to take follow-up actions + listener.onFailure((Exception) e.getCause()); } catch (Exception e) { logger.debug("failed to combine responses from nodes", e); listener.onFailure(e); @@ -417,6 +416,21 @@ protected void onCompletion() { } } } + + @Override + public void onCancelled() { + assert task instanceof CancellableTask : "task must be cancellable"; + try { + ((CancellableTask) task).ensureNotCancelled(); + } catch (TaskCancelledException e) { + nodeResponseTracker.discardIntermediateResponses(e); + } + } + + // For testing purposes + public NodeResponseTracker getNodeResponseTracker() { + return nodeResponseTracker; + } } class BroadcastByNodeTransportRequestHandler implements TransportRequestHandler { diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 5b13f3aab917d..c93f688b5a16d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -20,6 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -34,8 +36,6 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; public abstract class TransportNodesAction< NodesRequest extends BaseNodesRequest, @@ -128,14 +128,15 @@ protected void doExecute(Task task, NodesRequest request, ActionListener nodesResponses, ActionListener listener) { + void newResponse(Task task, NodesRequest request, NodeResponseTracker nodeResponseTracker, ActionListener listener) + throws NodeResponseTracker.DiscardedResponsesException { - if (nodesResponses == null) {
listener.onFailure(new NullPointerException("nodesResponses")); return; } @@ -143,11 +144,10 @@ void newResponse(Task task, NodesRequest request, AtomicReferenceArray nodesR final List responses = new ArrayList<>(); final List failures = new ArrayList<>(); - for (int i = 0; i < nodesResponses.length(); ++i) { - Object response = nodesResponses.get(i); - - if (response instanceof FailedNodeException) { - failures.add((FailedNodeException) response); + for (int i = 0; i < nodeResponseTracker.getExpectedResponseCount(); ++i) { + Object response = nodeResponseTracker.getResponse(i); + if (nodeResponseTracker.getResponse(i)instanceof FailedNodeException failedNodeException) { + failures.add(failedNodeException); } else { responses.add(nodeResponseClass.cast(response)); } @@ -203,12 +203,11 @@ protected String getTransportNodeAction(DiscoveryNode node) { return transportNodeAction; } - class AsyncAction { + class AsyncAction implements CancellableTask.CancellationListener { private final NodesRequest request; private final ActionListener listener; - private final AtomicReferenceArray responses; - private final AtomicInteger counter = new AtomicInteger(); + private final NodeResponseTracker nodeResponseTracker; private final Task task; AsyncAction(Task task, NodesRequest request, ActionListener listener) { @@ -219,10 +218,13 @@ class AsyncAction { resolveRequest(request, clusterService.state()); assert request.concreteNodes() != null; } - this.responses = new AtomicReferenceArray<>(request.concreteNodes().length); + this.nodeResponseTracker = new NodeResponseTracker(request.concreteNodes().length); } void start() { + if (task instanceof CancellableTask cancellableTask) { + cancellableTask.addListener(this); + } final DiscoveryNode[] nodes = request.concreteNodes(); if (nodes.length == 0) { finishHim(); @@ -267,28 +269,49 @@ public void handleException(TransportException exp) { } } + // For testing purposes + NodeResponseTracker getNodeResponseTracker() { + return nodeResponseTracker; + } + private void onOperation(int idx, NodeResponse nodeResponse) { - responses.set(idx, nodeResponse); - if (counter.incrementAndGet() == responses.length()) { + if (nodeResponseTracker.trackResponseAndCheckIfLast(idx, nodeResponse)) { finishHim(); } } private void onFailure(int idx, String nodeId, Throwable t) { logger.debug(new ParameterizedMessage("failed to execute on node [{}]", nodeId), t); - responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); - if (counter.incrementAndGet() == responses.length()) { + if (nodeResponseTracker.trackResponseAndCheckIfLast(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t))) { finishHim(); } } private void finishHim() { - if (task instanceof CancellableTask && ((CancellableTask) task).notifyIfCancelled(listener)) { + if ((task instanceof CancellableTask t) && t.notifyIfCancelled(listener)) { return; } final String executor = finalExecutor.equals(ThreadPool.Names.SAME) ? 
ThreadPool.Names.GENERIC : finalExecutor; - threadPool.executor(executor).execute(() -> newResponse(task, request, responses, listener)); + threadPool.executor(executor).execute(() -> { + try { + newResponse(task, request, nodeResponseTracker, listener); + } catch (NodeResponseTracker.DiscardedResponsesException e) { + // We propagate the reason that the results were discarded, in this case the task cancellation, in case the + // listener needs to take follow-up actions + listener.onFailure((Exception) e.getCause()); + } + }); + } + + @Override + public void onCancelled() { + assert task instanceof CancellableTask : "task must be cancellable"; + try { + ((CancellableTask) task).ensureNotCancelled(); + } catch (TaskCancelledException e) { + nodeResponseTracker.discardIntermediateResponses(e); + } } } diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java index 99ab7b6519a79..4c4bfb4dae799 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java @@ -26,6 +26,7 @@ import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; +import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -67,6 +68,13 @@ public void initialize( this.namedWriteableRegistry = namedWriteableRegistry; } + /** + * Return the names of all available actions registered with this client. + */ + public List getActionNames() { + return actions.keySet().stream().map(ActionType::name).toList(); + } + @Override public void close() { // nothing really to do diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 3fd0f52ddac94..68fe938e63de7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -631,6 +631,10 @@ public DiscoveryNodes nodes() { return nodes; } + public Builder routingTable(RoutingTable.Builder routingTableBuilder) { + return routingTable(routingTableBuilder.build()); + } + public Builder routingTable(RoutingTable routingTable) { this.routingTable = routingTable; return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index b9ceef4c8e98a..25384e38b612e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -7,12 +7,14 @@ */ package org.elasticsearch.cluster; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; public interface ClusterStateTaskExecutor { /** @@ -75,17 +77,28 @@ public static Builder builder() { public static class Builder { private final Map executionResults = new IdentityHashMap<>(); - public Builder success(T task) { - return result(task, TaskResult.success()); - } - - public Builder successes(Iterable tasks) { - for (T task : tasks) { - success(task); - } - return this; + /** + * Record that the cluster state update task succeeded.
+ * + * @param taskListener A listener for the completion of the resulting cluster state publication. This listener is completed with + * the cluster state that was published (or the publication exception that occurred) in the thread context + * in which the task was submitted. The task's {@link ClusterStateTaskListener#clusterStateProcessed} method + * is not called directly by the master service, nor is {@link ClusterStateTaskListener#onFailure} once the + * task execution has succeeded, but legacy implementations may use this listener to call those methods. + *
<p>
+ * The listener should prefer not to use the published state for things like determining the result of a + * task. The task may have been executed as part of a batch, and later tasks in the batch may overwrite + * the results from earlier tasks. Instead the listener should independently capture the information it + * needs to properly process the completion of a cluster state update. + */ + // TODO remove all remaining usages of the published state and then make this an ActionListener + public Builder success(T task, ActionListener taskListener) { + return result(task, TaskResult.success(taskListener)); } + /** + * Record that the cluster state update task failed. + */ public Builder failure(T task, Exception e) { return result(task, TaskResult.failure(e)); } @@ -109,19 +122,22 @@ public ClusterTasksResult build(ClusterState resultingState) { } } - record TaskResult(Exception failure) { - private static final TaskResult SUCCESS = new TaskResult(null); + record TaskResult(@Nullable ActionListener taskListener, @Nullable Exception failure) { + + public TaskResult { + assert failure == null ^ taskListener == null; + } - public static TaskResult success() { - return SUCCESS; + public static TaskResult success(ActionListener taskListener) { + return new TaskResult(Objects.requireNonNull(taskListener), null); } public static TaskResult failure(Exception failure) { - return new TaskResult(failure); + return new TaskResult(null, Objects.requireNonNull(failure)); } public boolean isSuccess() { - return this == SUCCESS; + return failure == null; } public Exception getFailure() { @@ -139,8 +155,19 @@ static ClusterStateTaskExecutor unbatched( @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { assert tasks.size() == 1 : "this only supports a single task but received " + tasks; - ClusterState result = tasks.get(0).execute(currentState); - return ClusterTasksResult.builder().successes(tasks).build(result); + final T task = tasks.get(0); + final ClusterState newState = task.execute(currentState); + return ClusterTasksResult.builder().success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState publishedState) { + task.clusterStateProcessed(currentState, publishedState); + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }).build(newState); } @Override @@ -150,4 +177,26 @@ public String describeTasks(List tasks) { }; } + /** + * An {@link ActionListener} for passing to {@link ClusterStateTaskExecutor.ClusterTasksResult.Builder#success} which preserves the + * legacy behaviour of calling {@link ClusterStateTaskListener#clusterStateProcessed} or {@link ClusterStateTaskListener#onFailure}. + *
<p>
+ * New implementations should use a dedicated listener rather than relying on this legacy behaviour. + */ + // TODO remove all remaining usages of this listener + record LegacyClusterTaskResultActionListener(ClusterStateTaskListener task, ClusterState originalState) + implements + ActionListener { + + @Override + public void onResponse(ClusterState publishedState) { + task.clusterStateProcessed(originalState, publishedState); + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java index da014ddb780ac..8ef25b18a4b06 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java @@ -32,12 +32,18 @@ default void onNoLongerMaster() { } /** - * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} have been processed - * properly by all listeners. + * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} method has been processed properly by all + * listeners. + * + * The {@code newState} parameter is the state that was ultimately published. This can lead to surprising behaviour if tasks are + * batched together: a later task in the batch may undo or overwrite the changes made by an earlier task. In general you should prefer + * to ignore the published state and instead handle the success of a publication via the listener that the executor passes to + * {@link ClusterStateTaskExecutor.ClusterTasksResult.Builder#success}. * * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the master service at {@code ERROR} * level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the right behaviour then * implementations must do so themselves, typically using a more specific logger and at a less dramatic log level. */ + // TODO: replace all remaining usages of this method with dedicated listeners and then remove it.
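To make the preferred replacement concrete: with this change an executor records each task's success together with a dedicated publication listener, rather than relying on clusterStateProcessed. Below is a minimal sketch against the builder API introduced above; the MyTask type and its notifyDone method are hypothetical, not part of the diff:

```java
import java.util.List;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;

// Hypothetical task type: it carries its own completion handling instead of
// inspecting the published cluster state.
interface MyTask extends ClusterStateTaskListener {
    void notifyDone();
}

class MyExecutor implements ClusterStateTaskExecutor<MyTask> {
    @Override
    public ClusterTasksResult<MyTask> execute(ClusterState currentState, List<MyTask> tasks) {
        ClusterTasksResult.Builder<MyTask> builder = ClusterTasksResult.builder();
        for (MyTask task : tasks) {
            builder.success(task, new ActionListener<>() {
                @Override
                public void onResponse(ClusterState publishedState) {
                    // Deliberately ignore publishedState: a later task in the same
                    // batch may have overwritten this task's changes.
                    task.notifyDone();
                }

                @Override
                public void onFailure(Exception e) {
                    task.onFailure(e);
                }
            });
        }
        return builder.build(currentState); // this sketch leaves the state unchanged
    }
}
```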
default void clusterStateProcessed(ClusterState oldState, ClusterState newState) {} } diff --git a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java index fff3894c16d4a..ba137b4440a83 100644 --- a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.cluster; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; @@ -23,7 +24,14 @@ public LocalMasterServiceTask(Priority priority) { this.priority = priority; } - public void execute(ClusterState currentState) throws Exception {} + protected void execute(ClusterState currentState) throws Exception {} + + @Override + public final void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + assert false : "not called"; + } + + protected void onPublicationComplete() {} public void submit(MasterService masterService, String source) { masterService.submitStateUpdateTask( @@ -47,10 +55,21 @@ public String describeTasks(List tasks) { @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { - assert tasks.size() == 1 && tasks.get(0) == LocalMasterServiceTask.this + final LocalMasterServiceTask thisTask = LocalMasterServiceTask.this; + assert tasks.size() == 1 && tasks.get(0) == thisTask : "expected one-element task list containing current object but was " + tasks; - LocalMasterServiceTask.this.execute(currentState); - return ClusterTasksResult.builder().successes(tasks).build(currentState); + thisTask.execute(currentState); + return ClusterTasksResult.builder().success(thisTask, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + onPublicationComplete(); + } + + @Override + public void onFailure(Exception e) { + LocalMasterServiceTask.this.onFailure(e); + } + }).build(currentState); } } ); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index ac92b489ebb48..6fe9ded74eaf0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -81,7 +81,7 @@ public class ShardStateAction { private final ThreadPool threadPool; // we deduplicate these shard state requests in order to avoid sending duplicate failed/started shard requests for a shard - private final ResultDeduplicator remoteShardStateUpdateDeduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator remoteShardStateUpdateDeduplicator; @Inject public ShardStateAction( @@ -94,6 +94,7 @@ public ShardStateAction( this.transportService = transportService; this.clusterService = clusterService; this.threadPool = threadPool; + this.remoteShardStateUpdateDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); transportService.registerRequestHandler( SHARD_STARTED_ACTION_NAME, @@ -333,7 +334,7 @@ public ClusterTasksResult execute(ClusterState currentSta entry, entry.getShardId().getIndex() ); - batchResultBuilder.success(task); + batchResultBuilder.success(task, task.newPublicationListener()); } else { // The primary term is 0 if the shard failed itself. 
It is > 0 if a write was done on a primary but was failed to be // replicated to the shard copy with the provided allocation id. In case where the shard failed itself, it's ok to just @@ -393,7 +394,7 @@ public ClusterTasksResult execute(ClusterState currentSta } else { // tasks that correspond to non-existent shards are marked as successful logger.debug("{} ignoring shard failed task [{}] (shard does not exist anymore)", entry.getShardId(), entry); - batchResultBuilder.success(task); + batchResultBuilder.success(task, task.newPublicationListener()); } } else { // failing a shard also possibly marks it as stale (see IndexMetadataUpdater) @@ -408,7 +409,9 @@ public ClusterTasksResult execute(ClusterState currentSta ClusterState maybeUpdatedState = currentState; try { maybeUpdatedState = applyFailedShards(currentState, failedShardsToBeApplied, staleShardsToBeApplied); - batchResultBuilder.successes(tasksToBeApplied); + for (var task : tasksToBeApplied) { + batchResultBuilder.success(task, task.newPublicationListener()); + } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply failed shards {}", failedShardsToBeApplied), e); // failures are communicated back to the requester @@ -535,6 +538,21 @@ public record FailedShardUpdateTask(FailedShardEntry entry, ActionListener newPublicationListener() { + return new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + listener.onResponse(TransportResponse.Empty.INSTANCE); + } + + @Override + public void onFailure(Exception e) { + // delegate to task's onFailure for logging + FailedShardUpdateTask.this.onFailure(e); + } + }; + } + @Override public void onFailure(Exception e) { if (e instanceof NotMasterException) { @@ -549,7 +567,7 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(TransportResponse.Empty.INSTANCE); + assert false : "should not be called"; } } @@ -637,7 +655,7 @@ public ClusterTasksResult execute(ClusterState currentSt // requests might still be in flight even after the shard has already been started or failed on the master. We just // ignore these requests for now. 
logger.debug("{} ignoring shard started task [{}] (shard does not exist anymore)", entry.shardId, entry); - builder.success(task); + builder.success(task, task.newPublicationListener()); } else { if (matched.primary() && entry.primaryTerm > 0) { final IndexMetadata indexMetadata = currentState.metadata().index(entry.shardId.getIndex()); @@ -658,7 +676,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry.primaryTerm, currentPrimaryTerm ); - builder.success(task); + builder.success(task, task.newPublicationListener()); continue; } } @@ -671,7 +689,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry, matched ); - builder.success(task); + builder.success(task, task.newPublicationListener()); } else { // remove duplicate actions as allocation service expects a clean list without duplicates if (seenShardRoutings.contains(matched)) { @@ -727,7 +745,9 @@ public ClusterTasksResult execute(ClusterState currentSt assert assertStartedIndicesHaveCompleteTimestampRanges(maybeUpdatedState); - builder.successes(tasksToBeApplied); + for (var task : tasksToBeApplied) { + builder.success(task, task.newPublicationListener()); + } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply started shards {}", shardRoutingsToBeApplied), e); builder.failures(tasksToBeApplied, e); @@ -833,20 +853,29 @@ public int hashCode() { } } - public static class StartedShardUpdateTask implements ClusterStateTaskListener { - - private final StartedShardEntry entry; - private final ActionListener listener; - - public StartedShardUpdateTask(StartedShardEntry entry, ActionListener listener) { - this.entry = entry; - this.listener = listener; - } + public record StartedShardUpdateTask(StartedShardEntry entry, ActionListener listener) + implements + ClusterStateTaskListener { public StartedShardEntry getEntry() { return entry; } + public ActionListener newPublicationListener() { + return new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + listener.onResponse(TransportResponse.Empty.INSTANCE); + } + + @Override + public void onFailure(Exception e) { + // delegate to task's onFailure for logging + StartedShardUpdateTask.this.onFailure(e); + } + }; + } + @Override public void onFailure(Exception e) { if (e instanceof NotMasterException) { @@ -861,7 +890,7 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(TransportResponse.Empty.INSTANCE); + assert false : "should not be called"; } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index fbbf6ed2391c4..2e95915661707 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -300,6 +300,13 @@ public static Builder builder() { return new Builder(); } + /** + * Convenience method, equivalent to: {@code builder().blocks(blocks)} + */ + public static Builder builder(ClusterBlocks blocks) { + return builder().blocks(blocks); + } + public static class Builder { private final Set global = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java new file mode 100644 index 
0000000000000..5bcf24992cbed --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.HealthStatus; + +import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; + +public class InstanceHasMasterHealthIndicatorService implements HealthIndicatorService { + + public static final String NAME = "instance_has_master"; + + private static final String INSTANCE_HAS_MASTER_GREEN_SUMMARY = "Health coordinating instance has a master node."; + private static final String INSTANCE_HAS_MASTER_RED_SUMMARY = "Health coordinating instance does not have a master node."; + + private final ClusterService clusterService; + + public InstanceHasMasterHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return NAME; + } + + @Override + public String component() { + return CLUSTER_COORDINATION; + } + + @Override + public HealthIndicatorResult calculate() { + + DiscoveryNode coordinatingNode = clusterService.localNode(); + ClusterState clusterState = clusterService.state(); + DiscoveryNodes nodes = clusterState.nodes(); + DiscoveryNode masterNode = nodes.getMasterNode(); + + HealthStatus instanceHasMasterStatus = masterNode == null ? HealthStatus.RED : HealthStatus.GREEN; + String instanceHasMasterSummary = masterNode == null ? 
INSTANCE_HAS_MASTER_RED_SUMMARY : INSTANCE_HAS_MASTER_GREEN_SUMMARY; + + return createIndicator(instanceHasMasterStatus, instanceHasMasterSummary, (builder, params) -> { + builder.startObject(); + builder.object("coordinating_node", xContentBuilder -> { + builder.field("node_id", coordinatingNode.getId()); + builder.field("name", coordinatingNode.getName()); + }); + builder.object("master_node", xContentBuilder -> { + if (masterNode != null) { + builder.field("node_id", masterNode.getId()); + builder.field("name", masterNode.getName()); + } else { + builder.nullField("node_id"); + builder.nullField("name"); + } + }); + return builder.endObject(); + }); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 7ee8a10fd7c37..0de30b9cd7c40 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; -import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.coordination.Coordinator.Mode; import org.elasticsearch.cluster.metadata.Metadata; @@ -104,8 +103,7 @@ public class JoinHelper { private final long term = currentTermSupplier.getAsLong(); @Override - public ClusterTasksResult execute(ClusterState currentState, List joiningTasks) - throws Exception { + public ClusterTasksResult execute(ClusterState currentState, List joinTasks) { // The current state that MasterService uses might have been updated by a (different) master in a higher term already // Stop processing the current cluster state update, as there's no point in continuing to compute it as // it will later be rejected by Coordinator.publish(...) anyhow @@ -114,7 +112,7 @@ public ClusterTasksResult execute(ClusterState currentSta throw new NotMasterException( "Higher term encountered (current: " + currentState.term() + " > used: " + term + "), there is a newer master" ); - } else if (currentState.nodes().getMasterNodeId() == null && joiningTasks.stream().anyMatch(Task::isBecomeMasterTask)) { + } else if (currentState.nodes().getMasterNodeId() == null && joinTasks.stream().anyMatch(JoinTask::isBecomingMaster)) { assert currentState.term() < term : "there should be at most one become master task per election (= by term)"; final CoordinationMetadata coordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata()) .term(term) @@ -124,7 +122,7 @@ public ClusterTasksResult execute(ClusterState currentSta } else if (currentState.nodes().isLocalNodeElectedMaster()) { assert currentState.term() == term : "term should be stable for the same master"; } - return super.execute(currentState, joiningTasks); + return super.execute(currentState, joinTasks); } }; @@ -293,7 +291,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional // Typically we're already connected to the destination at this point, the PeerFinder holds a reference to this connection to // keep it open, but we need to acquire our own reference to keep the connection alive through the joining process. 
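For the InstanceHasMasterHealthIndicatorService added above, a hypothetical usage sketch of how a caller might read the indicator. It assumes a ClusterService instance is at hand and that HealthIndicatorResult exposes its status through a status() accessor; neither piece of wiring appears in the diff itself:

```java
import org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.health.HealthIndicatorResult;
import org.elasticsearch.health.HealthStatus;

final class MasterHealthCheck {
    // GREEN when the local node currently knows of an elected master, RED otherwise.
    static boolean hasMaster(ClusterService clusterService) {
        HealthIndicatorResult result = new InstanceHasMasterHealthIndicatorService(clusterService).calculate();
        return result.status() == HealthStatus.GREEN;
    }
}
```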
- transportService.connectToNode(destination, new ActionListener() { + transportService.connectToNode(destination, new ActionListener<>() { @Override public void onResponse(Releasable connectionReference) { logger.trace("acquired connection for joining join {} with {}", destination, joinRequest); @@ -361,31 +359,6 @@ public void handleException(TransportException exp) { }); } - static class JoinTaskListener implements ClusterStateTaskListener { - private final JoinTaskExecutor.Task task; - private final ActionListener joinListener; - - JoinTaskListener(JoinTaskExecutor.Task task, ActionListener joinListener) { - this.task = task; - this.joinListener = joinListener; - } - - @Override - public void onFailure(Exception e) { - joinListener.onFailure(e); - } - - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - joinListener.onResponse(null); - } - - @Override - public String toString() { - return "JoinTaskListener{task=" + task + "}"; - } - } - interface JoinAccumulator { void handleJoinRequest(DiscoveryNode sender, ActionListener joinListener); @@ -395,11 +368,7 @@ default void close(Mode newMode) {} class LeaderJoinAccumulator implements JoinAccumulator { @Override public void handleJoinRequest(DiscoveryNode sender, ActionListener joinListener) { - final JoinTaskExecutor.Task task = new JoinTaskExecutor.Task( - sender, - joinReasonService.getJoinReason(sender, Mode.LEADER), - joinListener - ); + final JoinTask task = JoinTask.singleNode(sender, joinReasonService.getJoinReason(sender, Mode.LEADER), joinListener); assert joinTaskExecutor != null; masterService.submitStateUpdateTask("node-join", task, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor); } @@ -454,21 +423,20 @@ public void close(Mode newMode) { assert closed == false : "CandidateJoinAccumulator closed"; closed = true; if (newMode == Mode.LEADER) { - final List pendingAsTasks = new ArrayList<>(); - joinRequestAccumulator.forEach( - (node, listener) -> pendingAsTasks.add( - new JoinTaskExecutor.Task(node, joinReasonService.getJoinReason(node, Mode.CANDIDATE), listener) - ) - ); - - final String stateUpdateSource = "elected-as-master ([" + pendingAsTasks.size() + "] nodes joined)"; + final JoinTask joinTask = JoinTask.completingElection(joinRequestAccumulator.entrySet().stream().map(entry -> { + final DiscoveryNode discoveryNode = entry.getKey(); + final ActionListener listener = entry.getValue(); + return new JoinTask.NodeJoinTask( + discoveryNode, + joinReasonService.getJoinReason(discoveryNode, Mode.CANDIDATE), + listener + ); + })); - pendingAsTasks.add(JoinTaskExecutor.newBecomeMasterTask()); - pendingAsTasks.add(JoinTaskExecutor.newFinishElectionTask()); joinTaskExecutor = joinTaskExecutorGenerator.get(); - masterService.submitStateUpdateTasks( - stateUpdateSource, - pendingAsTasks, + masterService.submitStateUpdateTask( + "elected-as-master ([" + joinTask.nodeCount() + "] nodes joined)", + joinTask, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor ); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java new file mode 100644 index 0000000000000..142823d878446 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +public record JoinTask(List nodeJoinTasks, boolean isBecomingMaster) implements ClusterStateTaskListener { + + public static JoinTask singleNode(DiscoveryNode node, String reason, ActionListener listener) { + return new JoinTask(List.of(new NodeJoinTask(node, reason, listener)), false); + } + + public static JoinTask completingElection(Stream nodeJoinTaskStream) { + return new JoinTask(nodeJoinTaskStream.toList(), true); + } + + public JoinTask(List nodeJoinTasks, boolean isBecomingMaster) { + this.nodeJoinTasks = Collections.unmodifiableList(nodeJoinTasks); + this.isBecomingMaster = isBecomingMaster; + } + + public int nodeCount() { + return nodeJoinTasks.size(); + } + + @Override + public void onFailure(Exception e) { + for (NodeJoinTask nodeJoinTask : nodeJoinTasks) { + nodeJoinTask.listener.onFailure(e); + } + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + assert false : "not called"; + } + + @Override + public String toString() { + final StringBuilder stringBuilder = new StringBuilder(); + + if (isBecomingMaster) { + stringBuilder.append("_FINISH_ELECTION_"); + } + + for (NodeJoinTask nodeJoinTask : nodeJoinTasks) { + if (stringBuilder.isEmpty() == false) { + stringBuilder.append(", "); + } + nodeJoinTask.appendDescription(stringBuilder); + } + + return stringBuilder.toString(); + } + + public Iterable nodes() { + return () -> nodeJoinTasks.stream().map(j -> j.node).iterator(); + } + + public record NodeJoinTask(DiscoveryNode node, String reason, ActionListener listener) { + + public NodeJoinTask(DiscoveryNode node, String reason, ActionListener listener) { + this.node = Objects.requireNonNull(node); + this.reason = reason; + this.listener = listener; + } + + @Override + public String toString() { + final StringBuilder stringBuilder = new StringBuilder(); + appendDescription(stringBuilder); + return stringBuilder.toString(); + } + + public void appendDescription(StringBuilder stringBuilder) { + node.appendDescriptionWithoutAttributes(stringBuilder); + stringBuilder.append(' ').append(reason); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java index 03d58a9760ca7..2e54505cca556 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -37,72 +36,33 @@ import static 
org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; -public class JoinTaskExecutor implements ClusterStateTaskExecutor { +public class JoinTaskExecutor implements ClusterStateTaskExecutor { private static final Logger logger = LogManager.getLogger(JoinTaskExecutor.class); private final AllocationService allocationService; private final RerouteService rerouteService; - public record Task(DiscoveryNode node, String reason, ActionListener listener) implements ClusterStateTaskListener { - - @Override - public String toString() { - if (node == null) { - return reason; - } - - final StringBuilder stringBuilder = new StringBuilder(); - node.appendDescriptionWithoutAttributes(stringBuilder); - stringBuilder.append(' ').append(reason); - return stringBuilder.toString(); - } - - public boolean isBecomeMasterTask() { - return reason.equals(BECOME_MASTER_TASK_REASON); - } - - public boolean isFinishElectionTask() { - return reason.equals(FINISH_ELECTION_TASK_REASON); - } - - private static final String BECOME_MASTER_TASK_REASON = "_BECOME_MASTER_TASK_"; - private static final String FINISH_ELECTION_TASK_REASON = "_FINISH_ELECTION_"; - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(null); - } - - } - public JoinTaskExecutor(AllocationService allocationService, RerouteService rerouteService) { this.allocationService = allocationService; this.rerouteService = rerouteService; } @Override - public ClusterTasksResult execute(ClusterState currentState, List joiningNodes) throws Exception { - final ClusterTasksResult.Builder results = ClusterTasksResult.builder(); + public ClusterTasksResult execute(ClusterState currentState, List joinTasks) { + final ClusterTasksResult.Builder results = ClusterTasksResult.builder(); + + final boolean isBecomingMaster = joinTasks.stream().anyMatch(JoinTask::isBecomingMaster); final DiscoveryNodes currentNodes = currentState.nodes(); boolean nodesChanged = false; ClusterState.Builder newState; - if (joiningNodes.size() == 1 && joiningNodes.get(0).isFinishElectionTask()) { - return results.successes(joiningNodes).build(currentState); - } else if (currentNodes.getMasterNode() == null && joiningNodes.stream().anyMatch(Task::isBecomeMasterTask)) { - assert joiningNodes.stream().anyMatch(Task::isFinishElectionTask) - : "becoming a master but election is not finished " + joiningNodes; + if (currentNodes.getMasterNode() == null && isBecomingMaster) { // use these joins to try and become the master. // Note that we don't have to do any validation of the amount of joining nodes - the commit // during the cluster state publishing guarantees that we have enough - newState = becomeMasterAndTrimConflictingNodes(currentState, joiningNodes); + newState = becomeMasterAndTrimConflictingNodes(currentState, joinTasks); nodesChanged = true; } else if (currentNodes.isLocalNodeElectedMaster() == false) { logger.trace("processing node joins, but we are not the master. 
current master: {}", currentNodes.getMasterNode()); @@ -121,34 +81,48 @@ public ClusterTasksResult execute(ClusterState currentState, List jo final boolean enforceVersionBarrier = currentState.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) == false; // processing any joins Map joiniedNodeNameIds = new HashMap<>(); - for (final Task joinTask : joiningNodes) { - if (joinTask.isBecomeMasterTask() || joinTask.isFinishElectionTask()) { - // noop - } else if (currentNodes.nodeExistsWithSameRoles(joinTask.node())) { - logger.debug("received a join request for an existing node [{}]", joinTask.node()); - } else { - final DiscoveryNode node = joinTask.node(); - try { - if (enforceVersionBarrier) { - ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); - } - ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); - // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices - // we have to reject nodes that don't support all indices we have in this cluster - ensureIndexCompatibility(node.getVersion(), currentState.getMetadata()); - nodesBuilder.add(node); - nodesChanged = true; - minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); - maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); - if (node.isMasterNode()) { - joiniedNodeNameIds.put(node.getName(), node.getId()); + for (final JoinTask joinTask : joinTasks) { + final List onTaskSuccess = new ArrayList<>(joinTask.nodeCount()); + for (final JoinTask.NodeJoinTask nodeJoinTask : joinTask.nodeJoinTasks()) { + final DiscoveryNode node = nodeJoinTask.node(); + if (currentNodes.nodeExistsWithSameRoles(node)) { + logger.debug("received a join request for an existing node [{}]", node); + } else { + try { + if (enforceVersionBarrier) { + ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); + } + ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); + // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices + // we have to reject nodes that don't support all indices we have in this cluster + ensureIndexCompatibility(node.getVersion(), currentState.getMetadata()); + nodesBuilder.add(node); + nodesChanged = true; + minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); + maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); + if (node.isMasterNode()) { + joiniedNodeNameIds.put(node.getName(), node.getId()); + } + } catch (IllegalArgumentException | IllegalStateException e) { + onTaskSuccess.add(() -> nodeJoinTask.listener().onFailure(e)); + continue; } - } catch (IllegalArgumentException | IllegalStateException e) { - results.failure(joinTask, e); - continue; } + onTaskSuccess.add(() -> nodeJoinTask.listener().onResponse(null)); } - results.success(joinTask); + results.success(joinTask, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + for (Runnable joinCompleter : onTaskSuccess) { + joinCompleter.run(); + } + } + + @Override + public void onFailure(Exception e) { + joinTask.onFailure(e); + } + }); } if (nodesChanged) { @@ -202,17 +176,14 @@ public ClusterTasksResult execute(ClusterState currentState, List jo } } - protected ClusterState.Builder becomeMasterAndTrimConflictingNodes(ClusterState currentState, List joiningNodes) { + protected ClusterState.Builder 
becomeMasterAndTrimConflictingNodes(ClusterState currentState, List joinTasks) { assert currentState.nodes().getMasterNodeId() == null : currentState; DiscoveryNodes currentNodes = currentState.nodes(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes); nodesBuilder.masterNodeId(currentState.nodes().getLocalNodeId()); - for (final Task joinTask : joiningNodes) { - if (joinTask.isBecomeMasterTask() || joinTask.isFinishElectionTask()) { - // noop - } else { - final DiscoveryNode joiningNode = joinTask.node(); + for (final JoinTask joinTask : joinTasks) { + for (final DiscoveryNode joiningNode : joinTask.nodes()) { final DiscoveryNode nodeWithSameId = nodesBuilder.get(joiningNode.getId()); if (nodeWithSameId != null && nodeWithSameId.equals(joiningNode) == false) { logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameId, joiningNode); @@ -248,18 +219,6 @@ public boolean runOnlyOnMaster() { return false; } - public static Task newBecomeMasterTask() { - return new Task(null, Task.BECOME_MASTER_TASK_REASON, ActionListener.wrap(() -> {})); - } - - /** - * a task that is used to signal the election is stopped and we should process pending joins. - * it may be used in combination with {@link JoinTaskExecutor#newBecomeMasterTask()} - */ - public static Task newFinishElectionTask() { - return new Task(null, Task.FINISH_ELECTION_TASK_REASON, ActionListener.wrap(() -> {})); - } - /** * Ensures that all indices are compatible with the given node version. This will ensure that all indices in the given metadata * will not be created with a newer version of elasticsearch as well as that all indices are newer or equal to the minimum index diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java index 5038471d0c2cd..999b0ba8f7134 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; @@ -39,7 +40,7 @@ public void onNoLongerMaster() { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - onClusterStateProcessed.run(); + assert false : "not called"; } @Override @@ -59,6 +60,7 @@ public NodeRemovalClusterStateTaskExecutor(AllocationService allocationService) public ClusterTasksResult execute(final ClusterState currentState, final List tasks) throws Exception { final DiscoveryNodes.Builder remainingNodesBuilder = DiscoveryNodes.builder(currentState.nodes()); boolean removed = false; + final var resultBuilder = ClusterTasksResult.builder(); for (final Task task : tasks) { if (currentState.nodes().nodeExists(task.node())) { remainingNodesBuilder.remove(task.node()); @@ -66,18 +68,31 @@ public ClusterTasksResult execute(final ClusterState currentState, final L } else { logger.debug("node [{}] does not exist in cluster state, ignoring", task); } + resultBuilder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + 
task.onClusterStateProcessed.run(); + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); } - if (removed == false) { + final ClusterState finalState; + + if (removed) { + final ClusterState remainingNodesClusterState = remainingNodesClusterState(currentState, remainingNodesBuilder); + final ClusterState ptasksDisassociatedState = PersistentTasksCustomMetadata.disassociateDeadNodes(remainingNodesClusterState); + finalState = allocationService.disassociateDeadNodes(ptasksDisassociatedState, true, describeTasks(tasks)); + } else { // no nodes to remove, keep the current cluster state - return ClusterTasksResult.builder().successes(tasks).build(currentState); + finalState = currentState; } - final ClusterState remainingNodesClusterState = remainingNodesClusterState(currentState, remainingNodesBuilder); - final ClusterState ptasksDisassociatedState = PersistentTasksCustomMetadata.disassociateDeadNodes(remainingNodesClusterState); - final ClusterState finalState = allocationService.disassociateDeadNodes(ptasksDisassociatedState, true, describeTasks(tasks)); - - return ClusterTasksResult.builder().successes(tasks).build(finalState); + return resultBuilder.build(finalState); } // visible for testing diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 0c34449fdea20..55063ce658d46 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -312,7 +313,12 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { "data_stream_template", false, args -> { - IndexMode indexMode = args[2] != null ? IndexMode.fromString((String) args[2]) : null; + IndexMode indexMode; + if (IndexSettings.isTimeSeriesModeEnabled()) { + indexMode = args[2] != null ? 
IndexMode.fromString((String) args[2]) : null; + } else { + indexMode = null; + } return new DataStreamTemplate(args[0] != null && (boolean) args[0], args[1] != null && (boolean) args[1], indexMode); } ); @@ -320,7 +326,9 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { static { PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), HIDDEN); PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ALLOW_CUSTOM_ROUTING); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_MODE); + if (IndexSettings.isTimeSeriesModeEnabled()) { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_MODE); + } } private final boolean hidden; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 3e1ef209c3ea6..f16812a1570ef 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -171,8 +171,14 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { Index index = indices.get(i); IndexMetadata im = metadata.index(index); - // TODO: make start and end time fields in IndexMetadata class. + // TODO: make index_mode, start and end time fields in IndexMetadata class. // (this to avoid the overhead that occurs when reading a setting) + if (IndexSettings.MODE.get(im.getSettings()) != IndexMode.TIME_SERIES) { + // Not a tsdb backing index, so skip. + // (This can happen if this is a migrated tsdb data stream) + continue; + } + Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); // Check should be in sync with DataStreamTimestampFieldMapper#validateTimestamp(...) method @@ -192,12 +198,19 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { public void validate(Function imSupplier) { if (indexMode == IndexMode.TIME_SERIES) { // Get a sorted overview of each backing index with their start and end time range: - var startAndEndTimes = indices.stream().map(index -> imSupplier.apply(index.getName())).map(im -> { - Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); - Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); - assert end.isAfter(start); // This is also validated by TIME_SERIES_END_TIME setting. - return new Tuple<>(im.getIndex().getName(), new Tuple<>(start, end)); - }) + var startAndEndTimes = indices.stream() + .map(index -> imSupplier.apply(index.getName())) + .filter( + // Migrated tsdb data streams have non tsdb backing indices: + im -> IndexSettings.TIME_SERIES_START_TIME.exists(im.getSettings()) + && IndexSettings.TIME_SERIES_END_TIME.exists(im.getSettings()) + ) + .map(im -> { + Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assert end.isAfter(start); // This is also validated by TIME_SERIES_END_TIME setting.
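The selection code above relies on each TSDB backing index owning a time range, but the timestamp comparison itself lives in DataStreamTimestampFieldMapper#validateTimestamp and is not shown in this diff. As a standalone sketch, assuming an inclusive start bound and an exclusive end bound:

```java
import java.time.Instant;

// Standalone illustration of a [start, end) range check when picking a TSDB backing index.
final class TimeSeriesRange {
    static boolean accepts(Instant start, Instant end, Instant timestamp) {
        return timestamp.compareTo(start) >= 0 && timestamp.isBefore(end);
    }

    public static void main(String[] args) {
        Instant start = Instant.parse("2022-01-01T00:00:00Z");
        Instant end = Instant.parse("2022-01-02T00:00:00Z");
        System.out.println(accepts(start, end, Instant.parse("2022-01-01T12:00:00Z"))); // true
        System.out.println(accepts(start, end, end)); // false: the end bound is exclusive
    }
}
```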
+ return new Tuple<>(im.getIndex().getName(), new Tuple<>(start, end)); + }) .sorted(Comparator.comparing(entry -> entry.v2().v1())) // Sort by start time .collect(Collectors.toList()); @@ -265,21 +278,29 @@ public IndexMode getIndexMode() { * Performs a rollover on a {@code DataStream} instance and returns a new instance containing * the updated list of backing indices and incremented generation. * - * @param writeIndex new write index - * @param generation new generation + * @param writeIndex new write index + * @param generation new generation + * @param indexModeFromTemplate the index mode as is defined in the template that created this data stream * * @return new {@code DataStream} instance with the rollover operation applied */ - public DataStream rollover(Index writeIndex, long generation) { + public DataStream rollover(Index writeIndex, long generation, IndexMode indexModeFromTemplate) { ensureNotReplicated(); - return unsafeRollover(writeIndex, generation); + return unsafeRollover(writeIndex, generation, indexModeFromTemplate); } /** - * Like {@link #rollover(Index, long)}, but does no validation, use with care only. + * Like {@link #rollover(Index, long, IndexMode)}, but does no validation, use with care only. */ - public DataStream unsafeRollover(Index writeIndex, long generation) { + public DataStream unsafeRollover(Index writeIndex, long generation, IndexMode indexModeFromTemplate) { + IndexMode indexMode = this.indexMode; + // This allows for migrating a data stream to be a tsdb data stream: + // (only if index_mode=null|standard then allow it to be set to time_series) + if ((indexMode == null || indexMode == IndexMode.STANDARD) && indexModeFromTemplate == IndexMode.TIME_SERIES) { + indexMode = IndexMode.TIME_SERIES; + } + List backingIndices = new ArrayList<>(indices); backingIndices.add(writeIndex); return new DataStream( @@ -298,7 +319,7 @@ public DataStream unsafeRollover(Index writeIndex, long generation) { /** * Performs a dummy rollover on a {@code DataStream} instance and returns the tuple of the next write index name and next generation - * that this {@code DataStream} should roll over to using {@link #rollover(Index, long)}. + * that this {@code DataStream} should roll over to using {@link #rollover(Index, long, IndexMode)}. 
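The index-mode handling in unsafeRollover above boils down to a small pure rule: a data stream whose mode is unset or standard is promoted to time_series when the matching template now requests it, and is never downgraded on rollover. A self-contained distillation, where the local Mode enum stands in for org.elasticsearch.index.IndexMode:

```java
// Mirrors the promotion rule applied in unsafeRollover.
enum Mode { STANDARD, TIME_SERIES }

final class RolloverModeRule {
    static Mode nextMode(Mode current, Mode fromTemplate) {
        // Only an unset or standard data stream may be migrated to time_series;
        // an existing time_series data stream keeps its mode regardless of the template.
        if ((current == null || current == Mode.STANDARD) && fromTemplate == Mode.TIME_SERIES) {
            return Mode.TIME_SERIES;
        }
        return current;
    }
}
```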
* * @param clusterMetadata Cluster metadata * diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java index a372649a6ce2f..4f8083f1506b4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; @@ -26,7 +25,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -251,20 +249,12 @@ public Alias(AliasMetadata aliasMetadata, List indices) { } else if (writeIndices.size() == 1) { this.writeIndex = writeIndices.get(0).getIndex(); } else { - List writeIndicesStrings = writeIndices.stream().map(i -> i.getIndex().getName()).collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] has more than one write index [" - + Strings.collectionToCommaDelimitedString(writeIndicesStrings) - + "]" - ); + throw new IllegalStateException("write indices size can only be 0 or 1, but is [" + writeIndices.size() + "]"); } this.isHidden = aliasMetadata.isHidden() == null ? false : aliasMetadata.isHidden(); this.isSystem = indices.stream().allMatch(IndexMetadata::isSystem); dataStreamAlias = false; - validateAliasProperties(indices); } public Alias(DataStreamAlias dataStreamAlias, List indicesOfAllDataStreams, Index writeIndexOfWriteDataStream) { @@ -321,68 +311,6 @@ public List getAliases() { return null; } - private void validateAliasProperties(List referenceIndexMetadatas) { - // Validate hidden status - final Map> groupedByHiddenStatus = referenceIndexMetadatas.stream() - .collect(Collectors.groupingBy(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).isHidden()))); - if (isNonEmpty(groupedByHiddenStatus.get(true)) && isNonEmpty(groupedByHiddenStatus.get(false))) { - List hiddenOn = groupedByHiddenStatus.get(true) - .stream() - .map(idx -> idx.getIndex().getName()) - .collect(Collectors.toList()); - List nonHiddenOn = groupedByHiddenStatus.get(false) - .stream() - .map(idx -> idx.getIndex().getName()) - .collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] has is_hidden set to true on indices [" - + Strings.collectionToCommaDelimitedString(hiddenOn) - + "] but does not have is_hidden set to true on indices [" - + Strings.collectionToCommaDelimitedString(nonHiddenOn) - + "]; alias must have the same is_hidden setting " - + "on all indices" - ); - } - - // Validate system status - - final Map> groupedBySystemStatus = referenceIndexMetadatas.stream() - .collect(Collectors.groupingBy(IndexMetadata::isSystem)); - // If the alias has either all system or all non-system, then no more validation is required - if (isNonEmpty(groupedBySystemStatus.get(false)) && isNonEmpty(groupedBySystemStatus.get(true))) { - final List newVersionSystemIndices = groupedBySystemStatus.get(true) - .stream() - .filter(i -> i.getCreationVersion().onOrAfter(IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION)) - .map(i -> 
i.getIndex().getName()) - .sorted() // reliable error message for testing - .collect(Collectors.toList()); - - if (newVersionSystemIndices.isEmpty() == false) { - final List nonSystemIndices = groupedBySystemStatus.get(false) - .stream() - .map(i -> i.getIndex().getName()) - .sorted() // reliable error message for testing - .collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] refers to both system indices " - + newVersionSystemIndices - + " and non-system indices: " - + nonSystemIndices - + ", but aliases must refer to either system or" - + " non-system indices, not both" - ); - } - } - } - - private boolean isNonEmpty(List idxMetas) { - return (Objects.isNull(idxMetas) || idxMetas.isEmpty()) == false; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -443,10 +371,6 @@ public Index getWriteIndex(IndexRequest request, Metadata metadata) { return getWriteIndex(); } - if (getType() != IndexAbstraction.Type.DATA_STREAM) { - return getWriteIndex(); - } - if (dataStream.getIndexMode() != IndexMode.TIME_SERIES) { return getWriteIndex(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 41f13470fbec2..2531ee686ae50 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -54,6 +54,7 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.AbstractCollection; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -86,7 +87,7 @@ * The details of how this is persisted are covered in {@link org.elasticsearch.gateway.PersistedClusterStateService}. *
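+ * <p>
+ * {@code Metadata} also acts as a {@code Collection<IndexMetadata>}: iterating an instance yields the
+ * {@link IndexMetadata} of every index it holds, and {@code size()} reports the number of indices
+ * (see the {@code iterator()} and {@code size()} implementations further down).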

  */
-public class Metadata implements Iterable<IndexMetadata>, Diffable<Metadata>, ToXContentFragment {
+public class Metadata extends AbstractCollection<IndexMetadata> implements Diffable<Metadata>, ToXContentFragment {

     private static final Logger logger = LogManager.getLogger(Metadata.class);

@@ -203,6 +204,7 @@ default boolean isRestorable() {
     private final Settings settings;
     private final DiffableStringMap hashesOfConsistentSettings;
     private final ImmutableOpenMap<String, IndexMetadata> indices;
+    private final ImmutableOpenMap<String, Set<Index>> aliasedIndices;
     private final ImmutableOpenMap<String, IndexTemplateMetadata> templates;
     private final ImmutableOpenMap<String, Custom> customs;

@@ -233,6 +235,7 @@ private Metadata(
         int totalNumberOfShards,
         int totalOpenIndexShards,
         ImmutableOpenMap<String, IndexMetadata> indices,
+        ImmutableOpenMap<String, Set<Index>> aliasedIndices,
         ImmutableOpenMap<String, IndexTemplateMetadata> templates,
         ImmutableOpenMap<String, Custom> customs,
         String[] allIndices,
@@ -254,6 +257,7 @@ private Metadata(
         this.settings = settings;
         this.hashesOfConsistentSettings = hashesOfConsistentSettings;
         this.indices = indices;
+        this.aliasedIndices = aliasedIndices;
         this.customs = customs;
         this.templates = templates;
         this.totalNumberOfShards = totalNumberOfShards;
@@ -282,6 +286,7 @@ public Metadata withIncrementedVersion() {
             totalNumberOfShards,
             totalOpenIndexShards,
             indices,
+            aliasedIndices,
             templates,
             customs,
             allIndices,
@@ -339,15 +344,6 @@ public Version oldestIndexVersion() {
         return this.oldestIndexVersion;
     }

-    public boolean hasAlias(String alias) {
-        IndexAbstraction indexAbstraction = getIndicesLookup().get(alias);
-        if (indexAbstraction != null) {
-            return indexAbstraction.getType() == IndexAbstraction.Type.ALIAS;
-        } else {
-            return false;
-        }
-    }
-
     public boolean equalsAliases(Metadata other) {
         for (IndexMetadata otherIndex : other.indices().values()) {
             IndexMetadata thisIndex = index(otherIndex.getIndex());
@@ -383,6 +379,10 @@ public SortedMap<String, IndexAbstraction> getIndicesLookup() {
         return indicesLookup;
     }

+    public boolean sameIndicesLookup(Metadata other) {
+        return this.indicesLookup == other.indicesLookup;
+    }
+
     /**
      * Finds the specific index aliases that point to the requested concrete indices directly
      * or that match with the indices via wildcards.
@@ -787,6 +787,36 @@ public ImmutableOpenMap<String, IndexMetadata> getIndices() {
         return indices();
     }

+    /**
+     * Returns whether an alias exists with the provided alias name.
+     *
+     * @param aliasName The provided alias name
+     * @return whether an alias exists with the provided alias name
+     */
+    public boolean hasAlias(String aliasName) {
+        return aliasedIndices.containsKey(aliasName) || dataStreamAliases().containsKey(aliasName);
+    }
+
+    /**
+     * Returns all the indices that the alias with the provided alias name refers to.
+     * Note that this only returns indices that have been aliased directly, not indices
+     * that are behind a data stream or a data stream alias.
+     *
+     * @param aliasName The provided alias name
+     * @return all indices aliased by the alias with the provided alias name
+     */
+    public Set<Index> aliasedIndices(String aliasName) {
+        Objects.requireNonNull(aliasName);
+        return aliasedIndices.getOrDefault(aliasName, Set.of());
+    }
+
+    /**
+     * @return the names of all index aliases.
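+     * <p>
+     * A small illustrative sketch (alias and index names are hypothetical):
+     * <pre>{@code
+     * metadata.hasAlias("logs");        // true if "logs" is an index alias or a data stream alias
+     * metadata.aliasedIndices("logs");  // the Set<Index> that the "logs" alias points to directly
+     * metadata.aliasedIndices();        // the names of every index alias, e.g. ["logs", "metrics"]
+     * }</pre>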
+ */ + public Set aliasedIndices() { + return aliasedIndices.keySet(); + } + public ImmutableOpenMap templates() { return this.templates; } @@ -869,6 +899,11 @@ public Iterator iterator() { return indices.valuesIt(); } + @Override + public int size() { + return indices.size(); + } + public static boolean isGlobalStateEquals(Metadata metadata1, Metadata metadata2) { if (metadata1.coordinationMetadata.equals(metadata2.coordinationMetadata) == false) { return false; @@ -1136,6 +1171,7 @@ public static class Builder { private DiffableStringMap hashesOfConsistentSettings = DiffableStringMap.EMPTY; private final ImmutableOpenMap.Builder indices; + private final ImmutableOpenMap.Builder> aliasedIndices; private final ImmutableOpenMap.Builder templates; private final ImmutableOpenMap.Builder customs; @@ -1155,15 +1191,17 @@ public Builder() { this.hashesOfConsistentSettings = metadata.hashesOfConsistentSettings; this.version = metadata.version; this.indices = ImmutableOpenMap.builder(metadata.indices); + this.aliasedIndices = ImmutableOpenMap.builder(metadata.aliasedIndices); this.templates = ImmutableOpenMap.builder(metadata.templates); this.customs = ImmutableOpenMap.builder(metadata.customs); - previousIndicesLookup = metadata.getIndicesLookup(); + this.previousIndicesLookup = metadata.indicesLookup; this.mappingsByHash = new HashMap<>(metadata.mappingsByHash); } private Builder(Map mappingsByHash) { clusterUUID = UNKNOWN_CLUSTER_UUID; indices = ImmutableOpenMap.builder(); + aliasedIndices = ImmutableOpenMap.builder(); templates = ImmutableOpenMap.builder(); customs = ImmutableOpenMap.builder(); indexGraveyard(IndexGraveyard.builder().build()); // create new empty index graveyard to initialize @@ -1177,6 +1215,7 @@ public Builder put(IndexMetadata.Builder indexMetadataBuilder) { dedupeMapping(indexMetadataBuilder); IndexMetadata indexMetadata = indexMetadataBuilder.build(); IndexMetadata previous = indices.put(indexMetadata.getIndex().getName(), indexMetadata); + updateAliases(previous, indexMetadata); if (unsetPreviousIndicesLookup(previous, indexMetadata)) { previousIndicesLookup = null; } @@ -1193,6 +1232,7 @@ public Builder put(IndexMetadata indexMetadata, boolean incrementVersion) { indexMetadata = IndexMetadata.builder(indexMetadata).version(indexMetadata.getVersion() + 1).build(); } IndexMetadata previous = indices.put(indexMetadata.getIndex().getName(), indexMetadata); + updateAliases(previous, indexMetadata); if (unsetPreviousIndicesLookup(previous, indexMetadata)) { previousIndicesLookup = null; } @@ -1246,7 +1286,8 @@ public IndexMetadata getSafe(Index index) { public Builder remove(String index) { previousIndicesLookup = null; - indices.remove(index); + IndexMetadata previous = indices.remove(index); + updateAliases(previous, null); return this; } @@ -1255,6 +1296,7 @@ public Builder removeAllIndices() { indices.clear(); mappingsByHash.clear(); + aliasedIndices.clear(); return this; } @@ -1267,6 +1309,67 @@ public Builder indices(ImmutableOpenMap indices) { return this; } + void updateAliases(IndexMetadata previous, IndexMetadata current) { + if (previous == null && current != null) { + for (var cursor : current.getAliases()) { + putAlias(cursor.key, current.getIndex()); + } + } else if (previous != null && current == null) { + for (var cursor : previous.getAliases()) { + removeAlias(cursor.key, previous.getIndex()); + } + } else if (previous != null && current != null) { + if (Objects.equals(previous.getAliases(), current.getAliases())) { + return; + } + + for (var currentCursor : 
current.getAliases()) { + if (previous.getAliases().containsKey(currentCursor.key) == false) { + putAlias(currentCursor.key, current.getIndex()); + } + } + for (var previousCursor : previous.getAliases()) { + if (current.getAliases().containsKey(previousCursor.key) == false) { + removeAlias(previousCursor.key, current.getIndex()); + } + } + } + } + + private Builder putAlias(String alias, Index index) { + Objects.requireNonNull(alias); + Objects.requireNonNull(index); + + Set indices = new HashSet<>(aliasedIndices.getOrDefault(alias, Set.of())); + if (indices.add(index) == false) { + return this; // indices already contained this index + } + aliasedIndices.put(alias, Collections.unmodifiableSet(indices)); + return this; + } + + private Builder removeAlias(String alias, Index index) { + Objects.requireNonNull(alias); + Objects.requireNonNull(index); + + Set indices = aliasedIndices.get(alias); + if (indices == null || indices.isEmpty()) { + throw new IllegalStateException("Cannot remove non-existent alias [" + alias + "] for index [" + index.getName() + "]"); + } + + indices = new HashSet<>(indices); + if (indices.remove(index) == false) { + throw new IllegalStateException("Cannot remove non-existent alias [" + alias + "] for index [" + index.getName() + "]"); + } + + if (indices.isEmpty()) { + aliasedIndices.remove(alias); // for consistency, we don't store empty sets, so null it out + } else { + aliasedIndices.put(alias, Collections.unmodifiableSet(indices)); + } + return this; + } + public Builder put(IndexTemplateMetadata.Builder template) { return put(template.build()); } @@ -1618,18 +1721,6 @@ public Builder generateClusterUuidIfNeeded() { * @return a new Metadata instance */ public Metadata build() { - return build(true); - } - - /** - * @param builtIndicesLookupEagerly Controls whether indices lookup should be build as part of the execution of this method - * or after when needed. Almost all of the time indices lookup should be built eagerly, however - * in certain cases when Metdata instances are build that are not published and - * many indices have been defined then it makes sense to skip building indices lookup. - * - * @return a new Metadata instance - */ - public Metadata build(boolean builtIndicesLookupEagerly) { // TODO: We should move these datastructures to IndexNameExpressionResolver, this will give the following benefits: // 1) The datastructures will be rebuilt only when needed. Now during serializing we rebuild these datastructures // while these datastructures aren't even used. 
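+        // A note on the invariant assumed below: aliasedIndices is maintained incrementally via
+        // putAlias/removeAlias as index metadata is added and removed, so alias validation can
+        // iterate the per-alias index sets directly instead of rescanning the aliases of every index.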
@@ -1639,7 +1730,6 @@ public Metadata build(boolean builtIndicesLookupEagerly) { final List visibleOpenIndices = new ArrayList<>(); final List allClosedIndices = new ArrayList<>(); final List visibleClosedIndices = new ArrayList<>(); - final Set indicesAliases = new HashSet<>(); final ImmutableOpenMap indicesMap = indices.build(); final Set allIndices = indicesMap.keySet(); @@ -1662,23 +1752,24 @@ public Metadata build(boolean builtIndicesLookupEagerly) { visibleClosedIndices.add(name); } } - indexMetadata.getAliases().keysIt().forEachRemaining(indicesAliases::add); oldestIndexVersionId = Math.min(oldestIndexVersionId, indexMetadata.getCompatibilityVersion().id); } + var aliasedIndices = this.aliasedIndices.build(); + for (var cursor : aliasedIndices) { + List aliasIndices = cursor.value.stream() + .map(idx -> indicesMap.get(idx.getName())) + .collect(Collectors.toList()); + validateAlias(cursor.key, aliasIndices); + } final DataStreamMetadata dataStreamMetadata = (DataStreamMetadata) this.customs.get(DataStreamMetadata.TYPE); - ensureNoNameCollisions(indicesAliases, indicesMap, allIndices, dataStreamMetadata); + ensureNoNameCollisions(aliasedIndices.keySet(), indicesMap, allIndices, dataStreamMetadata); + assert assertDataStreams(indicesMap, dataStreamMetadata); - SortedMap indicesLookup; + SortedMap indicesLookup = null; if (previousIndicesLookup != null) { assert previousIndicesLookup.equals(buildIndicesLookup(dataStreamMetadata, indicesMap)); indicesLookup = previousIndicesLookup; - } else { - if (builtIndicesLookupEagerly) { - indicesLookup = buildIndicesLookup(dataStreamMetadata, indicesMap); - } else { - indicesLookup = null; - } } purgeUnusedEntries(indicesMap); @@ -1715,6 +1806,7 @@ public Metadata build(boolean builtIndicesLookupEagerly) { totalNumberOfShards, totalOpenIndexShards, indicesMap, + aliasedIndices, templates.build(), customs.build(), allIndicesArray, @@ -1904,32 +1996,108 @@ static SortedMap buildIndicesLookup( assert existing == null : "duplicate for " + entry.getKey(); } - validateDataStreams(indicesLookup, dataStreamMetadata); return Collections.unmodifiableSortedMap(indicesLookup); } - static void validateDataStreams(SortedMap indicesLookup, @Nullable DataStreamMetadata dsMetadata) { + private static boolean isNonEmpty(List idxMetas) { + return (Objects.isNull(idxMetas) || idxMetas.isEmpty()) == false; + } + + private static void validateAlias(String aliasName, List indexMetadatas) { + // Validate write indices + List writeIndices = indexMetadatas.stream() + .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex())) + .map(im -> im.getIndex().getName()) + .collect(Collectors.toList()); + if (writeIndices.size() > 1) { + throw new IllegalStateException( + "alias [" + + aliasName + + "] has more than one write index [" + + Strings.collectionToCommaDelimitedString(writeIndices) + + "]" + ); + } + + // Validate hidden status + final Map> groupedByHiddenStatus = indexMetadatas.stream() + .collect(Collectors.groupingBy(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).isHidden()))); + if (isNonEmpty(groupedByHiddenStatus.get(true)) && isNonEmpty(groupedByHiddenStatus.get(false))) { + List hiddenOn = groupedByHiddenStatus.get(true) + .stream() + .map(idx -> idx.getIndex().getName()) + .collect(Collectors.toList()); + List nonHiddenOn = groupedByHiddenStatus.get(false) + .stream() + .map(idx -> idx.getIndex().getName()) + .collect(Collectors.toList()); + throw new IllegalStateException( + "alias [" + + aliasName + + "] has 
is_hidden set to true on indices [" + + Strings.collectionToCommaDelimitedString(hiddenOn) + + "] but does not have is_hidden set to true on indices [" + + Strings.collectionToCommaDelimitedString(nonHiddenOn) + + "]; alias must have the same is_hidden setting " + + "on all indices" + ); + } + + // Validate system status + final Map> groupedBySystemStatus = indexMetadatas.stream() + .collect(Collectors.groupingBy(IndexMetadata::isSystem)); + // If the alias has either all system or all non-system, then no more validation is required + if (isNonEmpty(groupedBySystemStatus.get(false)) && isNonEmpty(groupedBySystemStatus.get(true))) { + final List newVersionSystemIndices = groupedBySystemStatus.get(true) + .stream() + .filter(i -> i.getCreationVersion().onOrAfter(IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION)) + .map(i -> i.getIndex().getName()) + .sorted() // reliable error message for testing + .collect(Collectors.toList()); + + if (newVersionSystemIndices.isEmpty() == false) { + final List nonSystemIndices = groupedBySystemStatus.get(false) + .stream() + .map(i -> i.getIndex().getName()) + .sorted() // reliable error message for testing + .collect(Collectors.toList()); + throw new IllegalStateException( + "alias [" + + aliasName + + "] refers to both system indices " + + newVersionSystemIndices + + " and non-system indices: " + + nonSystemIndices + + ", but aliases must refer to either system or" + + " non-system indices, not both" + ); + } + } + } + + static boolean assertDataStreams(ImmutableOpenMap indices, @Nullable DataStreamMetadata dsMetadata) { if (dsMetadata != null) { // Sanity check, because elsewhere a more user friendly error should have occurred: - List conflictingAliases = indicesLookup.values() - .stream() - .filter(ia -> ia.getType() == IndexAbstraction.Type.ALIAS) - .filter(ia -> ia.isDataStreamRelated() == false) - .filter(ia -> { - for (Index index : ia.getIndices()) { - if (indicesLookup.get(index.getName()).getParentDataStream() != null) { - return true; + List conflictingAliases = null; + + for (var dataStream : dsMetadata.dataStreams().values()) { + for (var index : dataStream.getIndices()) { + IndexMetadata im = indices.get(index.getName()); + if (im != null && im.getAliases().isEmpty() == false) { + for (var alias : im.getAliases().values()) { + if (conflictingAliases == null) { + conflictingAliases = new LinkedList<>(); + } + conflictingAliases.add(alias.alias()); } } - - return false; - }) - .map(IndexAbstraction::getName) - .collect(Collectors.toList()); - if (conflictingAliases.isEmpty() == false) { - throw new IllegalStateException("aliases " + conflictingAliases + " cannot refer to backing indices of data streams"); + } + } + if (conflictingAliases != null) { + throw new AssertionError("aliases " + conflictingAliases + " cannot refer to backing indices of data streams"); } } + return true; } public static void toXContent(Metadata metadata, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -2128,5 +2296,4 @@ public Metadata fromXContent(XContentParser parser) throws IOException { return Builder.fromXContent(parser); } }; - } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 3c639a5d15012..993d7cba8a3f1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1463,11 +1463,13 @@ static void prepareResizeIndexSettings( } indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, sourceMetadata.getCreationVersion()) - .put(IndexMetadata.SETTING_VERSION_COMPATIBILITY, sourceMetadata.getCompatibilityVersion()) .put(builder.build()) .put(IndexMetadata.SETTING_ROUTING_PARTITION_SIZE, sourceMetadata.getRoutingPartitionSize()) .put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), resizeSourceIndex.getName()) .put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID.getKey(), resizeSourceIndex.getUUID()); + if (sourceMetadata.getSettings().hasValue(IndexMetadata.SETTING_VERSION_COMPATIBILITY)) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_COMPATIBILITY, sourceMetadata.getCompatibilityVersion()); + } } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index 66c48106fb0c6..312bb505f9059 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -32,14 +32,17 @@ import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateAckListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; @@ -55,6 +58,8 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -76,6 +81,7 @@ import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -111,12 +117,10 @@ public class MetadataIndexStateService { private final ClusterService clusterService; private final AllocationService allocationService; - private final IndexMetadataVerifier indexMetadataVerifier; - private final IndicesService indicesService; - private final ShardLimitValidator shardLimitValidator; - private final ThreadPool threadPool; private final NodeClient client; + private final ThreadPool threadPool; private final ActiveShardsObserver activeShardsObserver; + private final ClusterStateTaskExecutor opensExecutor; @Inject public 
MetadataIndexStateService( @@ -128,14 +132,12 @@ public MetadataIndexStateService( NodeClient client, ThreadPool threadPool ) { - this.indicesService = indicesService; this.clusterService = clusterService; this.allocationService = allocationService; - this.threadPool = threadPool; this.client = client; - this.indexMetadataVerifier = indexMetadataVerifier; - this.shardLimitValidator = shardLimitValidator; + this.threadPool = threadPool; this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool); + this.opensExecutor = new OpenIndicesExecutor(allocationService, indexMetadataVerifier, indicesService, shardLimitValidator); } /** @@ -223,10 +225,8 @@ public void clusterStateProcessed( ); } // acknowledged maybe be false but some indices may have been correctly - // closed, so - // we maintain a kind of coherency by overriding the shardsAcknowledged - // value - // (see ShardsAcknowledgedResponse constructor) + // closed, so we maintain a kind of coherency by overriding the + // shardsAcknowledged value (see ShardsAcknowledgedResponse constructor) boolean shardsAcked = acknowledged ? shardsAcknowledged : false; listener.onResponse( new CloseIndexResponse(acknowledged, shardsAcked, indices) @@ -269,11 +269,9 @@ static ClusterState addIndexClosedBlocks( final Map blockedIndices, final ClusterState currentState ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final Set indicesToClose = new HashSet<>(); for (Index index : indices) { - final IndexMetadata indexMetadata = metadata.getSafe(index); + final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); if (indexMetadata.getState() != IndexMetadata.State.CLOSE) { indicesToClose.add(index); } else { @@ -302,8 +300,7 @@ static ClusterState addIndexClosedBlocks( ); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); for (Index index : indicesToClose) { ClusterBlock indexBlock = null; @@ -332,7 +329,7 @@ static ClusterState addIndexClosedBlocks( blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.joining(",")) ) ); - return ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(); + return ClusterState.builder(currentState).blocks(blocks).build(); } /** @@ -343,7 +340,7 @@ static ClusterState addIndexClosedBlocks( * @param block The type of block to add * @return a tuple of the updated cluster state, as well as the blocks that got added */ - static Tuple> addIndexBlock( + private static Tuple> addIndexBlock( final Index[] indices, final ClusterState currentState, final APIBlock block @@ -364,8 +361,7 @@ static Tuple> addIndexBlock( return Tuple.tuple(currentState, Collections.emptyMap()); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Map blockedIndices = new HashMap<>(); for (Index index : indicesToAddBlock) { @@ -403,10 +399,7 @@ static Tuple> addIndexBlock( block.name, blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.toList()) ); - return Tuple.tuple( - 
ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockedIndices - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).metadata(metadata).build(), blockedIndices); } /** @@ -538,7 +531,7 @@ public void onFailure(final Exception e) { * this action succeed then the shard is considered to be ready for closing. When all shards of a given index are ready for closing, * the index is considered ready to be closed. */ - class WaitForClosedBlocksApplied extends ActionRunnable> { + private class WaitForClosedBlocksApplied extends ActionRunnable> { private final Map blockedIndices; private final CloseIndexClusterStateUpdateRequest request; @@ -670,7 +663,7 @@ private void sendVerifyShardBeforeCloseRequest( * Helper class that coordinates with shards to ensure that blocks have been properly applied to all shards using * {@link TransportVerifyShardIndexBlockAction}. */ - class WaitForBlocksApplied extends ActionRunnable> { + private class WaitForBlocksApplied extends ActionRunnable> { private final Map blockedIndices; private final AddIndexBlockClusterStateUpdateRequest request; @@ -791,9 +784,8 @@ static Tuple> closeRoutingTable( final Map blockedIndices, final Map verifyResult ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); final Set closedIndices = new HashSet<>(); @@ -879,13 +871,13 @@ static Tuple> closeRoutingTable( } logger.info("completed closing of indices {}", closedIndices); return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), + ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable).build(), closingResults.values() ); } - public void openIndex(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { - onlyOpenIndex(request, ActionListener.wrap(response -> { + public void openIndices(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { + onlyOpenIndices(request, ActionListener.wrap(response -> { if (response.isAcknowledged()) { String[] indexNames = Arrays.stream(request.indices()).map(Index::getName).toArray(String[]::new); activeShardsObserver.waitForActiveShards( @@ -895,7 +887,7 @@ public void openIndex(final OpenIndexClusterStateUpdateRequest request, final Ac shardsAcknowledged -> { if (shardsAcknowledged == false) { logger.debug( - "[{}] indices opened, but the operation timed out while waiting for " + "enough shards to be started.", + "[{}] indices opened, but the operation timed out while waiting for enough shards to be started.", Arrays.toString(indexNames) ); } @@ -909,7 +901,7 @@ public void openIndex(final OpenIndexClusterStateUpdateRequest request, final Ac }, listener::onFailure)); } - private void onlyOpenIndex(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { + private void onlyOpenIndices(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { if (request.indices() == null || request.indices().length == 0) { throw new IllegalArgumentException("Index name is required"); } @@ -917,82 +909,10 @@ private void onlyOpenIndex(final 
OpenIndexClusterStateUpdateRequest request, fin final String indicesAsString = Arrays.toString(request.indices()); clusterService.submitStateUpdateTask( "open-indices " + indicesAsString, - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { - @Override - public ClusterState execute(final ClusterState currentState) { - final ClusterState updatedState = openIndices(request.indices(), currentState); - // no explicit wait for other nodes needed as we use AckedClusterStateUpdateTask - return allocationService.reroute(updatedState, "indices opened [" + indicesAsString + "]"); - } - }, - ClusterStateTaskExecutor.unbatched() - ); - } - - ClusterState openIndices(final Index[] indices, final ClusterState currentState) { - final List indicesToOpen = new ArrayList<>(); - for (Index index : indices) { - final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); - if (indexMetadata.getState() != IndexMetadata.State.OPEN) { - indicesToOpen.add(indexMetadata); - } else if (currentState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID)) { - indicesToOpen.add(indexMetadata); - } - } - - shardLimitValidator.validateShardLimit(currentState, indices); - if (indicesToOpen.isEmpty()) { - return currentState; - } - - logger.info( - () -> new ParameterizedMessage( - "opening indices [{}]", - String.join(",", indicesToOpen.stream().map(i -> (CharSequence) i.getIndex().toString())::iterator) - ) + new OpenIndicesTask(request, listener), + ClusterStateTaskConfig.build(Priority.URGENT, request.masterNodeTimeout()), + this.opensExecutor ); - - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); - - for (IndexMetadata indexMetadata : indicesToOpen) { - final Index index = indexMetadata.getIndex(); - if (indexMetadata.getState() != IndexMetadata.State.OPEN) { - final Settings.Builder updatedSettings = Settings.builder().put(indexMetadata.getSettings()); - updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); - - IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) - .state(IndexMetadata.State.OPEN) - .settingsVersion(indexMetadata.getSettingsVersion() + 1) - .settings(updatedSettings) - .timestampRange(IndexLongFieldRange.NO_SHARDS) - .build(); - - // The index might be closed because we couldn't import it due to an old incompatible - // version, so we need to verify its compatibility. 
- newIndexMetadata = indexMetadataVerifier.verifyIndexMetadata(newIndexMetadata, minIndexCompatibilityVersion); - try { - indicesService.verifyIndexMetadata(newIndexMetadata, newIndexMetadata); - } catch (Exception e) { - throw new ElasticsearchException("Failed to verify index " + index, e); - } - metadata.put(newIndexMetadata, true); - } - - // Always removes index closed blocks (note: this can fail on-going close index actions) - blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); - } - - ClusterState updatedState = ClusterState.builder(currentState).metadata(metadata).blocks(blocks).build(); - - final RoutingTable.Builder routingTable = RoutingTable.builder(updatedState.routingTable()); - for (IndexMetadata previousIndexMetadata : indicesToOpen) { - if (previousIndexMetadata.getState() != IndexMetadata.State.OPEN) { - routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); - } - } - return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); } /** @@ -1003,16 +923,13 @@ ClusterState openIndices(final Index[] indices, final ClusterState currentState) * @param block the full block to convert to * @return the updated cluster state, as well as the (failed and successful) index-level results for adding the block */ - static Tuple> finalizeBlock( + private static Tuple> finalizeBlock( final ClusterState currentState, final Map blockedIndices, final Map verifyResult, final APIBlock block ) { - - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Set effectivelyBlockedIndices = new HashSet<>(); Map blockingResults = new HashMap<>(verifyResult); @@ -1065,10 +982,7 @@ static Tuple> finalizeBlock( } } logger.info("completed adding block {} to indices {}", block.name, effectivelyBlockedIndices); - return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockingResults.values() - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).build(), blockingResults.values()); } /** @@ -1079,7 +993,7 @@ public static ClusterBlock createIndexClosingBlock() { return new ClusterBlock( INDEX_CLOSED_BLOCK_ID, UUIDs.randomBase64UUID(), - "index preparing to close. Reopen the index to allow " + "writes again or retry closing the index to fully close the index.", + "index preparing to close. 
Reopen the index to allow writes again or retry closing the index to fully close the index.", false, false, false, @@ -1108,4 +1022,173 @@ public static ClusterBlock createUUIDBasedBlock(ClusterBlock clusterBlock) { clusterBlock.levels() ); } + + private static class OpenIndicesExecutor implements ClusterStateTaskExecutor { + + private final AllocationService allocationService; + private final IndexMetadataVerifier indexMetadataVerifier; + private final IndicesService indicesService; + private final ShardLimitValidator shardLimitValidator; + + OpenIndicesExecutor( + AllocationService allocationService, + IndexMetadataVerifier indexMetadataVerifier, + IndicesService indicesService, + ShardLimitValidator shardLimitValidator + ) { + this.allocationService = allocationService; + this.indexMetadataVerifier = indexMetadataVerifier; + this.indicesService = indicesService; + this.shardLimitValidator = shardLimitValidator; + } + + @Override + public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); + ClusterState state = currentState; + + try { + // build an in-order de-duplicated array of all the indices to open + final Set indicesToOpen = new LinkedHashSet<>(tasks.size()); + for (OpenIndicesTask task : tasks) { + Collections.addAll(indicesToOpen, task.request.indices()); + } + Index[] indices = indicesToOpen.toArray(Index.EMPTY_ARRAY); + + // open them + state = openIndices(indices, state); + + // do a final reroute + state = allocationService.reroute(state, "indices opened"); + + for (OpenIndicesTask task : tasks) { + builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + // listener is notified at the end of acking + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); + } + } catch (Exception e) { + for (OpenIndicesTask task : tasks) { + builder.failure(task, e); + } + } + + return builder.build(state); + } + + private ClusterState openIndices(final Index[] indices, final ClusterState currentState) { + final List indicesToOpen = new ArrayList<>(indices.length); + for (Index index : indices) { + final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); + if (indexMetadata.getState() != IndexMetadata.State.OPEN) { + indicesToOpen.add(indexMetadata); + } else if (currentState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID)) { + indicesToOpen.add(indexMetadata); + } + } + + shardLimitValidator.validateShardLimit(currentState, indices); + if (indicesToOpen.isEmpty()) { + return currentState; + } + + logger.info(() -> { + final StringBuilder indexNames = new StringBuilder(); + Strings.collectionToDelimitedStringWithLimit( + indicesToOpen.stream().map(i -> (CharSequence) i.getIndex().toString()).toList(), + ",", + "", + "", + 512, + indexNames + ); + return new ParameterizedMessage("opening indices [{}]", indexNames); + }); + + final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); + final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); + + for (IndexMetadata indexMetadata : indicesToOpen) { + final Index index = indexMetadata.getIndex(); + if (indexMetadata.getState() != IndexMetadata.State.OPEN) { + final Settings.Builder updatedSettings = 
Settings.builder().put(indexMetadata.getSettings()); + updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); + + IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) + .state(IndexMetadata.State.OPEN) + .settingsVersion(indexMetadata.getSettingsVersion() + 1) + .settings(updatedSettings) + .timestampRange(IndexLongFieldRange.NO_SHARDS) + .build(); + + // The index might be closed because we couldn't import it due to an old incompatible + // version, so we need to verify its compatibility. + newIndexMetadata = indexMetadataVerifier.verifyIndexMetadata(newIndexMetadata, minIndexCompatibilityVersion); + try { + indicesService.verifyIndexMetadata(newIndexMetadata, newIndexMetadata); + } catch (Exception e) { + throw new ElasticsearchException("Failed to verify index " + index, e); + } + metadata.put(newIndexMetadata, true); + } + + // Always removes index closed blocks (note: this can fail on-going close index actions) + blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); + } + + ClusterState updatedState = ClusterState.builder(currentState).metadata(metadata).blocks(blocks).build(); + + final RoutingTable.Builder routingTable = RoutingTable.builder(updatedState.routingTable()); + for (IndexMetadata previousIndexMetadata : indicesToOpen) { + if (previousIndexMetadata.getState() != IndexMetadata.State.OPEN) { + routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); + } + } + return ClusterState.builder(updatedState).routingTable(routingTable).build(); + } + } + + private record OpenIndicesTask(OpenIndexClusterStateUpdateRequest request, ActionListener listener) + implements + ClusterStateTaskListener, + ClusterStateAckListener { + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + public boolean mustAck(DiscoveryNode discoveryNode) { + return true; + } + + @Override + public void onAllNodesAcked(@Nullable Exception e) { + listener.onResponse(AcknowledgedResponse.of(e == null)); + } + + @Override + public void onAckTimeout() { + listener.onResponse(AcknowledgedResponse.FALSE); + } + + @Override + public TimeValue ackTimeout() { + return request.ackTimeout(); + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + assert false : "not called"; + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 6924c20fed814..d82a617dd9d6b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -57,15 +57,10 @@ public MetadataMappingService(ClusterService clusterService, IndicesService indi this.indicesService = indicesService; } - static class PutMappingClusterStateUpdateTask implements ClusterStateTaskListener, ClusterStateAckListener { - - private final PutMappingClusterStateUpdateRequest request; - private final ActionListener listener; - - PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) { - this.request = request; - this.listener = listener; - } + record PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) + implements + ClusterStateTaskListener, + ClusterStateAckListener { @Override public void onFailure(Exception e) { @@ -99,6 +94,7 @@ 
public ClusterTasksResult execute( ClusterState currentState, List tasks ) throws Exception { + final ClusterState originalState = currentState; Map indexMapperServices = new HashMap<>(); ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); try { @@ -115,7 +111,17 @@ public ClusterTasksResult execute( } } currentState = applyRequest(currentState, request, indexMapperServices); - builder.success(task); + builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + // listener is notified at the end of acking + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java index ae06e6f6f9636..06f01a2129805 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java @@ -78,7 +78,7 @@ public MetadataUpdateSettingsService( for (AckedClusterStateUpdateTask task : tasks) { try { state = task.execute(state); - builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java index 05a0286c26f38..7f90ffc46712c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java @@ -131,7 +131,7 @@ public Metadata applyChanges(Metadata oldMetadata, RoutingTable newRoutingTable) if (metadataBuilder != null) { Metadata newMetadata = metadataBuilder.build(); - assert oldMetadata.getIndicesLookup() == newMetadata.getIndicesLookup(); + assert oldMetadata.sameIndicesLookup(newMetadata); return newMetadata; } else { return oldMetadata; diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 66d5428d5d135..122659c64422e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -213,7 +213,7 @@ public void removeApplier(ClusterStateApplier applier) { } /** - * Add a listener for updated cluster states + * Add a listener for updated cluster states. Listeners are executed in the system thread context. */ public void addListener(ClusterStateListener listener) { clusterStateListeners.add(listener); @@ -222,7 +222,7 @@ public void addListener(ClusterStateListener listener) { /** * Removes a listener for updated cluster states. 
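+     * Removing a listener that was never added (or that has already been removed) is a no-op.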
*/ - public void removeListener(ClusterStateListener listener) { + public void removeListener(final ClusterStateListener listener) { clusterStateListeners.remove(listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 9f97e63c14a3b..24aa87f3bec36 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -28,8 +28,6 @@ import org.elasticsearch.node.Node; import org.elasticsearch.threadpool.ThreadPool; -import java.util.List; - public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; @@ -259,7 +257,7 @@ public void submitStateUpdateTask( ClusterStateTaskConfig config, ClusterStateTaskExecutor executor ) { - masterService.submitStateUpdateTasks(source, List.of(task), config, executor); + masterService.submitStateUpdateTask(source, task, config, executor); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 4474b24ddc241..85d7435b7fd5b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Assertions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.Builder; @@ -45,7 +46,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; @@ -54,6 +54,7 @@ import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -148,13 +149,9 @@ class Batcher extends TaskBatcher { } @Override - protected void onTimeout(List tasks, TimeValue timeout) { + protected void onTimeout(BatchedTask task, TimeValue timeout) { threadPool.generic() - .execute( - () -> tasks.forEach( - task -> ((UpdateTask) task).onFailure(new ProcessClusterEventTimeoutException(timeout, task.source)) - ) - ); + .execute(() -> ((UpdateTask) task).onFailure(new ProcessClusterEventTimeoutException(timeout, task.source))); } @Override @@ -212,18 +209,6 @@ public void onNoLongerMaster() { } } - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - try (ThreadContext.StoredContext ignore = threadContextSupplier.get()) { - listener.clusterStateProcessed(oldState, newState); - } catch (Exception e) { - logger.error(() -> new ParameterizedMessage(""" - exception thrown by listener while notifying of cluster state, old cluster state: - {} - new cluster state: - {}""", oldState, newState), e); - } - } - @Nullable public TaskAckListener createTaskAckListener(long clusterStateVersion, DiscoveryNodes nodes) { return contextPreservingAckListener == null @@ -231,14 +216,6 @@ public TaskAckListener createTaskAckListener(long clusterStateVersion, Discovery : new TaskAckListener(contextPreservingAckListener, clusterStateVersion, nodes, 
threadPool); } - public void clusterStateUnchanged(ClusterState clusterState) { - if (contextPreservingAckListener != null) { - // no need to wait for ack if nothing changed, the update can be counted as acknowledged - contextPreservingAckListener.onAllNodesAcked(null); - } - clusterStateProcessed(clusterState, clusterState); - } - @Override public ClusterStateTaskListener getTask() { return (ClusterStateTaskListener) task; @@ -367,7 +344,7 @@ protected boolean blockingAllowed() { void onPublicationSuccess(ClusterStatePublicationEvent clusterStatePublicationEvent, TaskOutputs taskOutputs) { final long notificationStartTime = threadPool.rawRelativeTimeInMillis(); - taskOutputs.processedDifferentClusterState(clusterStatePublicationEvent.getOldState(), clusterStatePublicationEvent.getNewState()); + taskOutputs.processedDifferentClusterState(clusterStatePublicationEvent.getNewState()); try { taskOutputs.clusterStatePublished(clusterStatePublicationEvent); @@ -464,7 +441,6 @@ private ClusterState patchVersions(ClusterState previousClusterState, ClusterTas if (previousClusterState != newClusterState) { // only the master controls the version numbers - final var previousIndicesLookup = newClusterState.metadata().getIndicesLookup(); Builder builder = incrementVersion(newClusterState); if (previousClusterState.routingTable() != newClusterState.routingTable()) { builder.routingTable(newClusterState.routingTable().withIncrementedVersion()); @@ -473,8 +449,9 @@ private ClusterState patchVersions(ClusterState previousClusterState, ClusterTas builder.metadata(newClusterState.metadata().withIncrementedVersion()); } + final var previousMetadata = newClusterState.metadata(); newClusterState = builder.build(); - assert previousIndicesLookup == newClusterState.metadata().getIndicesLookup(); + assert previousMetadata.sameIndicesLookup(newClusterState.metadata()); } return newClusterState; @@ -524,7 +501,21 @@ public void submitStateUpdateTask( ClusterStateTaskConfig config, ClusterStateTaskExecutor executor ) { - submitStateUpdateTasks(source, List.of(task), config, executor); + if (lifecycle.started() == false) { + return; + } + final ThreadContext threadContext = threadPool.getThreadContext(); + final Supplier supplier = threadContext.newRestorableContext(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + taskBatcher.submitTask(taskBatcher.new UpdateTask(config.priority(), source, task, supplier, executor), config.timeout()); + } catch (EsRejectedExecutionException e) { + // ignore cases where we are shutting down..., there is really nothing interesting + // to be done here... 
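+            // (reasoning sketch: the executor only starts rejecting tasks once this service is stopping,
+            // so a rejection while the lifecycle is still started is unexpected and is rethrown below)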
+ if (lifecycle.stoppedOrClosed() == false) { + throw e; + } + } } /** @@ -534,14 +525,14 @@ class TaskOutputs { final TaskInputs taskInputs; final ClusterState previousClusterState; final ClusterState newClusterState; - final List nonFailedTasks; + final List nonFailedTasks; final Map executionResults; TaskOutputs( TaskInputs taskInputs, ClusterState previousClusterState, ClusterState newClusterState, - List nonFailedTasks, + List nonFailedTasks, Map executionResults ) { this.taskInputs = taskInputs; @@ -551,12 +542,12 @@ class TaskOutputs { this.executionResults = executionResults; } - void publishingFailed(FailedToCommitClusterStateException t) { - nonFailedTasks.forEach(task -> task.onFailure(t)); + void publishingFailed(FailedToCommitClusterStateException e) { + nonFailedTasks.forEach(task -> task.onPublishFailure(e)); } - void processedDifferentClusterState(ClusterState previousClusterState, ClusterState newClusterState) { - nonFailedTasks.forEach(task -> task.clusterStateProcessed(previousClusterState, newClusterState)); + void processedDifferentClusterState(ClusterState newClusterState) { + nonFailedTasks.forEach(task -> task.onPublishSuccess(newClusterState)); } void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationEvent) { @@ -566,7 +557,7 @@ void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationE ClusterStatePublisher.AckListener createAckListener(ClusterState newClusterState) { return new CompositeTaskAckListener( nonFailedTasks.stream() - .map(task -> task.createTaskAckListener(newClusterState.version(), newClusterState.nodes())) + .map(task -> task.task().createTaskAckListener(newClusterState.version(), newClusterState.nodes())) .filter(Objects::nonNull) .collect(Collectors.toList()) ); @@ -588,7 +579,14 @@ void notifyFailedTasks() { } void notifySuccessfulTasksOnUnchangedClusterState() { - nonFailedTasks.forEach(task -> task.clusterStateUnchanged(newClusterState)); + nonFailedTasks.forEach(task -> { + Batcher.UpdateTask updateTask = task.task(); + if (updateTask.contextPreservingAckListener != null) { + // no need to wait for ack if nothing changed, the update can be counted as acknowledged + updateTask.contextPreservingAckListener.onAllNodesAcked(null); + } + task.onClusterStateUnchanged(newClusterState); + }); } } @@ -839,13 +837,55 @@ private ClusterTasksResult executeTasks(TaskInputs tas return clusterTasksResult; } - private List getNonFailedTasks( - TaskInputs taskInputs, - ClusterTasksResult clusterTasksResult - ) { - return taskInputs.updateTasks.stream().filter(updateTask -> { + private record NonFailedTask(Batcher.UpdateTask task, ActionListener publishListener) { + + public void onPublishSuccess(ClusterState newClusterState) { + try (ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onResponse(newClusterState); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "exception thrown by listener while notifying of new cluster state:\n{}", + newClusterState + ), + e + ); + } + } + + public void onClusterStateUnchanged(ClusterState clusterState) { + try (ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onResponse(clusterState); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "exception thrown by listener while notifying of unchanged cluster state:\n{}", + clusterState + ), + e + ); + } + } + + public void onPublishFailure(FailedToCommitClusterStateException e) { + try 
(ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onFailure(e); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.error("exception thrown by listener notifying of failure", inner); + } + } + } + + private List getNonFailedTasks(TaskInputs taskInputs, ClusterTasksResult clusterTasksResult) { + return taskInputs.updateTasks.stream().flatMap(updateTask -> { assert clusterTasksResult.executionResults().containsKey(updateTask.getTask()) : "missing " + updateTask; - return clusterTasksResult.executionResults().get(updateTask.getTask()).isSuccess(); + final ClusterStateTaskExecutor.TaskResult taskResult = clusterTasksResult.executionResults().get(updateTask.getTask()); + if (taskResult.isSuccess()) { + return Stream.of(new NonFailedTask(updateTask, taskResult.taskListener())); + } else { + return Stream.of(); + } }).collect(Collectors.toList()); } @@ -872,47 +912,6 @@ void onNoLongerMaster() { } } - /** - * Submits a batch of cluster state update tasks; submitted updates are guaranteed to be processed together, - * potentially with more tasks of the same executor. - * - * @param source the source of the cluster state update task - * @param tasks a collection of update tasks, which implement {@link ClusterStateTaskListener} so that they are notified when they - * are executed; tasks that also implement {@link ClusterStateAckListener} are notified on acks too. - * @param config the cluster state update task configuration - * @param executor the cluster state update task executor; tasks - * that share the same executor will be executed - * batches on this executor - * @param the type of the cluster state update task state - * - */ - public void submitStateUpdateTasks( - final String source, - final Collection tasks, - final ClusterStateTaskConfig config, - final ClusterStateTaskExecutor executor - ) { - if (lifecycle.started() == false) { - return; - } - final ThreadContext threadContext = threadPool.getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(true); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - threadContext.markAsSystemContext(); - - List safeTasks = tasks.stream() - .map(task -> taskBatcher.new UpdateTask(config.priority(), source, task, supplier, executor)) - .toList(); - taskBatcher.submitTasks(safeTasks, config.timeout()); - } catch (EsRejectedExecutionException e) { - // ignore cases where we are shutting down..., there is really nothing interesting - // to be done here... 
- if (lifecycle.stoppedOrClosed() == false) { - throw e; - } - } - } - private static class MasterServiceStarvationWatcher implements PrioritizedEsThreadPoolExecutor.StarvationWatcher { private final long warnThreshold; diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java b/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java index 60ebffc9cbc0e..7c8aca8fb1469 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java @@ -19,15 +19,12 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.IdentityHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; -import java.util.stream.Collectors; /** * Batching support for {@link PrioritizedEsThreadPoolExecutor} @@ -45,86 +42,50 @@ public TaskBatcher(Logger logger, PrioritizedEsThreadPoolExecutor threadExecutor this.threadExecutor = threadExecutor; } - public void submitTasks(List tasks, @Nullable TimeValue timeout) throws EsRejectedExecutionException { - if (tasks.isEmpty()) { - return; - } - final BatchedTask firstTask = tasks.get(0); - assert tasks.stream().allMatch(t -> t.batchingKey == firstTask.batchingKey) - : "tasks submitted in a batch should share the same batching key: " + tasks; - // convert to an identity map to check for dups based on task identity - - tasksPerBatchingKey.compute(firstTask.batchingKey, (k, existingTasks) -> { - assert assertNoDuplicateTasks(tasks, existingTasks); + public void submitTask(BatchedTask task, @Nullable TimeValue timeout) throws EsRejectedExecutionException { + tasksPerBatchingKey.compute(task.batchingKey, (k, existingTasks) -> { if (existingTasks == null) { - return Collections.synchronizedSet(new LinkedHashSet<>(tasks)); + existingTasks = Collections.synchronizedSet(new LinkedHashSet<>()); + } else { + assert assertNoDuplicateTasks(task, existingTasks); } - existingTasks.addAll(tasks); + existingTasks.add(task); return existingTasks; }); if (timeout != null) { - threadExecutor.execute(firstTask, timeout, () -> onTimeoutInternal(tasks, timeout)); + threadExecutor.execute(task, timeout, () -> onTimeoutInternal(task, timeout)); } else { - threadExecutor.execute(firstTask); + threadExecutor.execute(task); } } - private static boolean assertNoDuplicateTasks(List tasks, Set existingTasks) { - final Map tasksIdentity = tasks.stream() - .collect( - Collectors.toMap( - BatchedTask::getTask, - Function.identity(), - (a, b) -> { throw new AssertionError("cannot add duplicate task: " + a); }, - IdentityHashMap::new - ) - ); - if (existingTasks == null) { - return true; - } - for (BatchedTask existing : existingTasks) { - // check that there won't be two tasks with the same identity for the same batching key - BatchedTask duplicateTask = tasksIdentity.get(existing.getTask()); - assert duplicateTask == null - : "task [" - + duplicateTask.describeTasks(Collections.singletonList(existing)) - + "] with source [" - + duplicateTask.source - + "] is already queued"; + private static boolean assertNoDuplicateTasks(BatchedTask task, Set existingTasks) { + for (final var existingTask : existingTasks) { + assert existingTask.getTask() != task.getTask() + : "task [" + task.describeTasks(List.of(task)) + "] with source [" + task.source + "] is 
already queued"; } return true; } - private void onTimeoutInternal(List tasks, TimeValue timeout) { - final ArrayList toRemove = new ArrayList<>(); - for (BatchedTask task : tasks) { - if (task.processed.getAndSet(true) == false) { - logger.debug("task [{}] timed out after [{}]", task.source, timeout); - toRemove.add(task); - } - } - if (toRemove.isEmpty() == false) { - BatchedTask firstTask = toRemove.get(0); - Object batchingKey = firstTask.batchingKey; - assert tasks.stream().allMatch(t -> t.batchingKey == batchingKey) - : "tasks submitted in a batch should share the same batching key: " + tasks; - tasksPerBatchingKey.computeIfPresent(batchingKey, (key, existingTasks) -> { - toRemove.forEach(existingTasks::remove); - if (existingTasks.isEmpty()) { - return null; - } - return existingTasks; - }); - onTimeout(toRemove, timeout); + private void onTimeoutInternal(BatchedTask task, TimeValue timeout) { + if (task.processed.getAndSet(true)) { + return; } + + logger.debug("task [{}] timed out after [{}]", task.source, timeout); + tasksPerBatchingKey.computeIfPresent(task.batchingKey, (key, existingTasks) -> { + existingTasks.remove(task); + return existingTasks.isEmpty() ? null : existingTasks; + }); + onTimeout(task, timeout); } /** * Action to be implemented by the specific batching implementation. * All tasks have the same batching key. */ - protected abstract void onTimeout(List tasks, TimeValue timeout); + protected abstract void onTimeout(BatchedTask task, TimeValue timeout); void runIfNotProcessed(BatchedTask updateTask) { // if this task is already processed, it shouldn't execute other tasks with same batching key that arrived later, @@ -135,6 +96,7 @@ void runIfNotProcessed(BatchedTask updateTask) { final Set pending = tasksPerBatchingKey.remove(updateTask.batchingKey); if (pending != null) { // pending is a java.util.Collections.SynchronizedSet so we can safely iterate holding its mutex + // noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (pending) { for (BatchedTask task : pending) { if (task.processed.getAndSet(true) == false) { diff --git a/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java index 2ca8c7b9be20d..459328c6df5aa 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java @@ -18,7 +18,8 @@ public abstract class AbstractBytesReference implements BytesReference { - private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it + private int hash; // we cache the hash of this reference since it can be quite costly to re-calculated it + private boolean hashIsZero; // if the calculated hash is actually zero @Override public int getInt(int index) { @@ -89,7 +90,7 @@ public boolean equals(Object other) { @Override public int hashCode() { - if (hash == null) { + if (hash == 0 && hashIsZero == false) { final BytesRefIterator iterator = iterator(); BytesRef ref; int result = 1; @@ -102,10 +103,13 @@ public int hashCode() { } catch (IOException ex) { throw new AssertionError("wont happen", ex); } - return hash = result; - } else { - return hash; + if (result == 0) { + hashIsZero = true; + } else { + hash = result; + } } + return hash; } @Override diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java 
b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java index aa9f5ee41567f..5acc18df3f8a3 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java @@ -241,7 +241,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 20427cc734638..8afef238aae50 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -209,7 +209,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java b/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java index 44c0b2297f55e..9f80619b7e80d 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java @@ -213,13 +213,9 @@ private static Edge[] ring( minX = Math.min(minX, points[i].getX()); maxX = Math.max(maxX, points[i].getX()); } - if (signedArea == 0) { - // Points are collinear or self-intersection - throw new IllegalArgumentException( - "Cannot determine orientation: signed area equal to 0." + " Points are collinear or polygon self-intersects." - ); - } - boolean orientation = signedArea < 0; + // if the polygon is tiny, the computed area can result in zero. In that case + // we assume orientation is correct + boolean orientation = signedArea == 0 ? handedness != false : signedArea < 0; // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards diff --git a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index 54954c80665e2..1dfc2398b5270 100644 --- a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -346,4 +346,7 @@ public static List wrapUnmodifiableOrEmptySingleton(List list) { return list.isEmpty() ? List.of() : Collections.unmodifiableList(list); } + public static List limitSize(List list, int size) { + return list.size() <= size ? 
list : list.subList(0, size); + } } diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index 7fc0c97579540..926f58646bf58 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -15,12 +15,10 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.health.components.controller.ClusterCoordination; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; @@ -28,9 +26,9 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.List; +import java.util.NoSuchElementException; +import java.util.Objects; public class GetHealthAction extends ActionType { @@ -69,6 +67,13 @@ public List getComponents() { return components; } + public HealthComponentResult findComponent(String name) { + return components.stream() + .filter(c -> Objects.equals(c.name(), name)) + .findFirst() + .orElseThrow(() -> new NoSuchElementException("Component [" + name + "] is not found")); + } + @Override public void writeTo(StreamOutput out) throws IOException { throw new AssertionError("GetHealthAction should not be sent over the wire."); @@ -77,7 +82,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); - builder.field("status", status); + builder.field("status", status.xContentValue()); builder.field("cluster_name", clusterName.value()); builder.array("impacts"); builder.startObject("components"); @@ -87,6 +92,28 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par builder.endObject(); return builder.endObject(); } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Response response = (Response) o; + return clusterName.equals(response.clusterName) && status == response.status && components.equals(response.components); + } + + @Override + public int hashCode() { + return Objects.hash(clusterName, status, components); + } + + @Override + public String toString() { + return "Response{clusterName=" + clusterName + ", status=" + status + ", components=" + components + '}'; + } } public static class Request extends ActionRequest { @@ -100,27 +127,23 @@ public ActionRequestValidationException validate() { public static class TransportAction extends org.elasticsearch.action.support.TransportAction { private final ClusterService clusterService; + private final HealthService healthService; @Inject public TransportAction( - final ActionFilters actionFilters, - final TransportService transportService, - final ClusterService clusterService + ActionFilters actionFilters, + TransportService transportService, + ClusterService clusterService, + HealthService healthService ) { super(NAME, actionFilters, 
transportService.getTaskManager()); this.clusterService = clusterService; + this.healthService = healthService; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState clusterState = clusterService.state(); - final HealthComponentResult controller = ClusterCoordination.createClusterCoordinationComponent( - clusterService.localNode(), - clusterState - ); - final HealthComponentResult snapshots = new HealthComponentResult("snapshots", HealthStatus.GREEN, Collections.emptyMap()); - final ClusterName clusterName = clusterService.getClusterName(); - listener.onResponse(new Response(clusterName, Arrays.asList(controller, snapshots))); + listener.onResponse(new Response(clusterService.getClusterName(), healthService.getHealth())); } } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java index 645b53960c965..f2c0db461ddbb 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java @@ -12,51 +12,26 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Collection; import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.stream.Collectors; +import java.util.NoSuchElementException; +import java.util.Objects; -import static java.util.stream.Collectors.collectingAndThen; -import static java.util.stream.Collectors.groupingBy; -import static java.util.stream.Collectors.toList; +public record HealthComponentResult(String name, HealthStatus status, List indicators) implements ToXContentObject { -public record HealthComponentResult(String name, HealthStatus status, Map indicators) - implements - ToXContentObject { - - public static Collection createComponentsFromIndicators(Collection indicators) { + public HealthIndicatorResult findIndicator(String name) { return indicators.stream() - .collect( - groupingBy( - HealthIndicatorResult::component, - TreeMap::new, - collectingAndThen(toList(), HealthComponentResult::createComponentFromIndicators) - ) - ) - .values(); - } - - private static HealthComponentResult createComponentFromIndicators(List indicators) { - assert indicators.size() > 0 : "Component should not be non empty"; - assert indicators.stream().map(HealthIndicatorResult::component).distinct().count() == 1L - : "Should not mix indicators from different components"; - return new HealthComponentResult( - indicators.get(0).component(), - HealthStatus.merge(indicators.stream().map(HealthIndicatorResult::status)), - indicators.stream().collect(Collectors.toMap(HealthIndicatorResult::name, i -> i)) - ); + .filter(i -> Objects.equals(i.name(), name)) + .findFirst() + .orElseThrow(() -> new NoSuchElementException("Indicator [" + name + "] is not found")); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("status", status); + builder.field("status", status.xContentValue()); builder.startObject("indicators"); - for (Map.Entry indicator : indicators.entrySet()) { - builder.field(indicator.getKey()); - indicator.getValue().toXContent(builder, params); + for (HealthIndicatorResult indicator : indicators) { + builder.field(indicator.name(), indicator, params); } builder.endObject(); return builder.endObject(); diff --git 
a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java index e085b9981e496..1817012cd6640 100644
--- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java
+++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java
@@ -18,7 +18,7 @@ public interface HealthIndicatorDetails extends ToXContentObject { HealthIndicatorDetails EMPTY = new HealthIndicatorDetails() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- return builder;
+ return builder.startObject().endObject();
} }; }
diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index cec405e16a59d..5d54ecb709d56 100644
--- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java
+++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java
@@ -20,7 +20,7 @@ public record HealthIndicatorResult(String name, String component, HealthStatus @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject();
- builder.field("status", status);
+ builder.field("status", status.xContentValue());
builder.field("summary", summary); builder.field("details", details, params); // TODO 83303: Add detail / documentation
diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java index 44a979efaaff1..e15b0e60cdf09 100644
--- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java
+++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java
@@ -9,7 +9,7 @@ package org.elasticsearch.health; /**
- * This is a service interface used to report health indicators from the different plugins.
+ * This is a service interface used to calculate health indicators from the different modules or plugins.
*/ public interface HealthIndicatorService {
diff --git a/server/src/main/java/org/elasticsearch/health/HealthService.java b/server/src/main/java/org/elasticsearch/health/HealthService.java new file mode 100644 index 0000000000000..5a82929f69984
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/health/HealthService.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.health;
+
+import java.util.List;
+import java.util.TreeMap;
+
+import static java.util.stream.Collectors.collectingAndThen;
+import static java.util.stream.Collectors.groupingBy;
+import static java.util.stream.Collectors.toList;
+
+/**
+ * This service collects health indicators from all modules and plugins of Elasticsearch
+ */
+public class HealthService {
+
+ private final List healthIndicatorServices;
+
+ public HealthService(List healthIndicatorServices) {
+ this.healthIndicatorServices = healthIndicatorServices;
+ }
+
+ public List getHealth() {
+ return List.copyOf(
+ healthIndicatorServices.stream()
+ .map(HealthIndicatorService::calculate)
+ .collect(
+ groupingBy(
+ HealthIndicatorResult::component,
+ TreeMap::new,
+ collectingAndThen(toList(), HealthService::createComponentFromIndicators)
+ )
+ )
+ .values()
+ );
+ }
+
+ private static HealthComponentResult createComponentFromIndicators(List indicators) {
+ assert indicators.size() > 0 : "Component should not be empty";
+ assert indicators.stream().map(HealthIndicatorResult::component).distinct().count() == 1L : "Should not mix indicators from different components";
+ return new HealthComponentResult(
+ indicators.get(0).component(),
+ HealthStatus.merge(indicators.stream().map(HealthIndicatorResult::status)),
+ indicators
+ );
+ }
+}
diff --git a/server/src/main/java/org/elasticsearch/health/HealthStatus.java b/server/src/main/java/org/elasticsearch/health/HealthStatus.java index a9e6a7dba9d44..1094c35b5aac3 100644
--- a/server/src/main/java/org/elasticsearch/health/HealthStatus.java
+++ b/server/src/main/java/org/elasticsearch/health/HealthStatus.java
@@ -13,6 +13,7 @@ import java.io.IOException; import java.util.Comparator;
+import java.util.Locale;
import java.util.stream.Stream; public enum HealthStatus implements Writeable {
@@ -38,4 +39,8 @@ public byte value() { public static HealthStatus merge(Stream statuses) { return statuses.max(Comparator.comparing(HealthStatus::value)).orElse(GREEN); }
+
+ public String xContentValue() {
+ return name().toLowerCase(Locale.ROOT);
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java b/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java index c07273c731352..1a4d2458263dd 100644
--- a/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java
+++ b/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java
@@ -8,7 +8,14 @@ package org.elasticsearch.health;
+/**
+ * This class defines common component names for the health indicators.
+ */
public class ServerHealthComponents {
+
+ public static final String CLUSTER_COORDINATION = "cluster_coordination";
public static final String DATA = "data"; public static final String SNAPSHOT = "snapshot";
+
+ private ServerHealthComponents() {}
}
diff --git a/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java b/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java deleted file mode 100644 index fd3988f38b5de..0000000000000
--- a/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements.
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.health.components.controller; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.health.HealthComponentResult; -import org.elasticsearch.health.HealthIndicatorResult; -import org.elasticsearch.health.HealthStatus; - -import java.util.Collections; - -public final class ClusterCoordination { - - public static final String NAME = "cluster_coordination"; - - public static final String INSTANCE_HAS_MASTER_NAME = "instance_has_master"; - public static final String INSTANCE_HAS_MASTER_GREEN_SUMMARY = "Health coordinating instance has a master node."; - public static final String INSTANCE_HAS_MASTER_RED_SUMMARY = "Health coordinating instance does not have a master node."; - - private ClusterCoordination() {} - - public static HealthComponentResult createClusterCoordinationComponent( - final DiscoveryNode coordinatingNode, - final ClusterState clusterState - ) { - final DiscoveryNodes nodes = clusterState.nodes(); - final DiscoveryNode masterNode = nodes.getMasterNode(); - - HealthStatus instanceHasMasterStatus = masterNode == null ? HealthStatus.RED : HealthStatus.GREEN; - String instanceHasMasterSummary = masterNode == null ? INSTANCE_HAS_MASTER_RED_SUMMARY : INSTANCE_HAS_MASTER_GREEN_SUMMARY; - HealthIndicatorResult instanceHasMaster = new HealthIndicatorResult( - INSTANCE_HAS_MASTER_NAME, - NAME, - instanceHasMasterStatus, - instanceHasMasterSummary, - (builder, params) -> { - builder.startObject(); - builder.object("coordinating_node", xContentBuilder -> { - builder.field("node_id", coordinatingNode.getId()); - builder.field("name", coordinatingNode.getName()); - }); - builder.object("master_node", xContentBuilder -> { - if (masterNode != null) { - builder.field("node_id", masterNode.getId()); - builder.field("name", masterNode.getName()); - } else { - builder.nullField("node_id"); - builder.nullField("name"); - } - }); - return builder.endObject(); - } - ); - - // Only a single indicator currently so it determines the status - final HealthStatus status = instanceHasMaster.status(); - return new HealthComponentResult(NAME, status, Collections.singletonMap(INSTANCE_HAS_MASTER_NAME, instanceHasMaster)); - } -} diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 66c0f71b328b0..de6633db7de15 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -17,9 +17,11 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.DocumentDimensions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; @@ -83,6 +85,11 @@ public MetadataFieldMapper buildTimeSeriesIdFieldMapper() { // non time-series indices must not have a 
TimeSeriesIdFieldMapper return null; } + + @Override + public DocumentDimensions buildDocumentDimensions() { + return new DocumentDimensions.OnlySingleValueAllowed(); + } }, TIME_SERIES("time_series") { @Override @@ -110,6 +117,9 @@ private String error(Setting unsupported) { @Override public void validateMapping(MappingLookup lookup) { + if (lookup.nestedLookup() != NestedLookup.EMPTY) { + throw new IllegalArgumentException("cannot have nested fields when index is in " + tsdbMode()); + } if (((RoutingFieldMapper) lookup.getMapper(RoutingFieldMapper.NAME)).required()) { throw new IllegalArgumentException(routingRequiredBad()); } @@ -145,6 +155,11 @@ private String routingRequiredBad() { public MetadataFieldMapper buildTimeSeriesIdFieldMapper() { return TimeSeriesIdFieldMapper.INSTANCE; } + + @Override + public DocumentDimensions buildDocumentDimensions() { + return new TimeSeriesIdFieldMapper.TimeSeriesIdBuilder(); + } }; protected String tsdbMode() { @@ -237,6 +252,8 @@ public String getName() { */ public abstract MetadataFieldMapper buildTimeSeriesIdFieldMapper(); + public abstract DocumentDimensions buildDocumentDimensions(); + public static IndexMode fromString(String value) { return switch (value) { case "standard" -> IndexMode.STANDARD; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index ea066d3543735..bf7b0baf4a818 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -13,7 +13,10 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; @@ -53,7 +56,7 @@ public abstract void parse(XContentParser parser, CheckedConsumer consumer) { - try (XContentParser parser = MapXContentParser.wrapObject(sourceMap)) { + try (XContentParser parser = wrapObject(sourceMap)) { parse(parser, v -> consumer.accept(normalizeFromSource(v)), e -> {}); /* ignore malformed */ } catch (IOException e) { throw new UncheckedIOException(e); @@ -67,6 +70,18 @@ private void fetchFromSource(Object sourceMap, Consumer consumer) { // TODO: move geometry normalization to the geometry parser. 
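To connect the dots on the IndexMode change above: dimension collection is now a per-mode strategy. A short trace stitched from hunks elsewhere in this diff (the lines below are quoted from this PR for orientation, not new API):

// 1. DocumentParserContext asks the index mode for a collector when parsing starts:
//        this.dimensions = indexSettings.getMode().buildDocumentDimensions();
// 2. Dimension-aware mappers feed values into that collector instead of the LuceneDocument, e.g. in NumberFieldMapper:
//        context.getDimensions().addLong(fieldType().name(), numericValue.longValue());
// 3. In time_series mode the collector is a TimeSeriesIdBuilder, which TimeSeriesIdFieldMapper.postParse casts back to build the _tsid doc value:
//        TimeSeriesIdBuilder timeSeriesIdBuilder = (TimeSeriesIdBuilder) context.getDimensions();
//        context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesIdBuilder.build().toBytesRef()));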
public abstract T normalizeFromSource(T geometry); + private static XContentParser wrapObject(Object sourceMap) throws IOException { + XContentParser parser = new MapXContentParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + Collections.singletonMap("dummy_field", sourceMap), + XContentType.JSON + ); + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return parser; + } } public abstract static class AbstractGeometryFieldType extends MappedFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 9dc019ba9ccbd..d3c5d3193adc1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -30,11 +30,15 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; +import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser.NumberType; import org.elasticsearch.xcontent.XContentParser.Token; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; @@ -425,8 +429,11 @@ public void parse(DocumentParserContext context) throws IOException { context.addToFieldNames(fieldType().name()); for (CompletionInputMetadata metadata : inputMap.values()) { - DocumentParserContext externalValueContext = context.switchParser(new CompletionParser(metadata)); - multiFields.parse(this, externalValueContext); + multiFields.parse( + this, + context, + () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) + ); } } @@ -586,19 +593,66 @@ public void doValidate(MappingLookup mappers) { } } - private static class CompletionParser extends FilterXContentParser { + /** + * Parser that exposes the expected format depending on the type of multi-field that is consuming content. + * Completion fields can hold multi-fields, which can either parse a simple string value or an object in case of another completion + * field. This parser detects which of the two is parsing content and exposes the full object when needed (including input, weight + * and context if available), otherwise the input value only. + * + * A few assumptions are made that make this work: + * 1) only string values are supported for a completion field, hence only sub-fields that parse strings are supported + * 2) sub-fields that parse simple values only ever call {@link #textOrNull()} to do so. They may call {@link #currentToken()} only to + * check if there's a null value, which is irrelevant in the multi-fields scenario as null values are ignored in the parent field and + * don't lead to any field creation. + * 3) completion is the only sub-field type that may be parsing the object structure. 
+ *
+ * The parser exposes the simple value by default, unless {@link #nextToken()} is called, which signals that the
+ * consumer supports the object structure.
+ */
+ // This parser changes behaviour depending on which methods are called by consumers, which is extremely delicate. This kind of works for
+ // our internal mappers, but what about mappers from plugins?
+ static class MultiFieldParser extends FilterXContentParser {
+ private final String textValue;
+ private final String fieldName;
+ private final XContentLocation locationOffset;
+ private final XContentParser fullObjectParser;
+ // we assume that the consumer is parsing values, we will switch to exposing the object format if nextToken is called
+ private boolean parsingObject = false;
+
+ MultiFieldParser(CompletionInputMetadata metadata, String fieldName, XContentLocation locationOffset) {
+ this.fullObjectParser = new MapXContentParser(
+ NamedXContentRegistry.EMPTY,
+ DeprecationHandler.IGNORE_DEPRECATIONS,
+ metadata.toMap(),
+ XContentType.JSON
+ );
+ this.fieldName = fieldName;
+ this.locationOffset = locationOffset;
+ this.textValue = metadata.input;
+ }
- boolean advanced = false;
- final String textValue;
+ @Override
+ protected XContentParser delegate() {
+ // if consumers are only reading values, they should never go through delegate but rather call the
+ // overridden currentToken and textOrNull below that don't call super
+ assert parsingObject;
+ return fullObjectParser;
+ }
- private CompletionParser(CompletionInputMetadata metadata) throws IOException {
- super(MapXContentParser.wrapObject(metadata.toMap()));
- this.textValue = metadata.input;
+ @Override
+ public Token currentToken() {
+ if (parsingObject == false) {
+ // nextToken has not been called, it may or may not be called at a later time.
+ // What we return does not really matter for mappers that support simple values, as they only check for VALUE_NULL.
+ // For mappers that do support objects, START_OBJECT is a good choice.
+ return Token.START_OBJECT;
+ }
+ return super.currentToken();
}
@Override public String textOrNull() throws IOException {
- if (advanced == false) {
+ if (parsingObject == false) {
return textValue; } return super.textOrNull();
@@ -606,8 +660,32 @@ public String textOrNull() throws IOException { @Override public Token nextToken() throws IOException {
- advanced = true;
+ if (parsingObject == false) {
+ // a completion sub-field is parsing
+ parsingObject = true;
+ // move to START_OBJECT, currentToken has already returned START_OBJECT and we will advance one token further just below
+ this.fullObjectParser.nextToken();
+ }
return super.nextToken(); }
+
+ @Override
+ public String currentName() throws IOException {
+ if (parsingObject == false) {
+ return fieldName;
+ }
+ String currentName = super.currentName();
+ if (currentName == null && currentToken() == Token.END_OBJECT) {
+ return fieldName;
+ }
+ return currentName;
+ }
+
+ @Override
+ public XContentLocation getTokenLocation() {
+ // return fixed token location: it's not possible to match the token location while parsing through the object structure,
+ // because completion metadata have been rewritten, hence they won't match the incoming document
+ return locationOffset;
+ }
} }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java new file mode 100644 index 0000000000000..6f5f0c336633c
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import java.net.InetAddress;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Collects dimensions from documents.
+ */
+public interface DocumentDimensions {
+ void addString(String fieldName, String value);
+
+ void addIp(String fieldName, InetAddress value);
+
+ void addLong(String fieldName, long value);
+
+ void addUnsignedLong(String fieldName, long value);
+
+ /**
+ * Makes sure that each dimension only appears one time.
+ */ + class OnlySingleValueAllowed implements DocumentDimensions { + private final Set names = new HashSet<>(); + + @Override + public void addString(String fieldName, String value) { + add(fieldName); + } + + @Override + public void addIp(String fieldName, InetAddress value) { + add(fieldName); + } + + @Override + public void addLong(String fieldName, long value) { + add(fieldName); + } + + @Override + public void addUnsignedLong(String fieldName, long value) { + add(fieldName); + } + + private void add(String fieldName) { + boolean isNew = names.add(fieldName); + if (false == isNew) { + throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); + } + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index be8f9c5834787..5ccb43c099f91 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.IndexSettings; import java.util.List; @@ -95,6 +94,7 @@ public void validate(IndexSettings settings, boolean checkLimits) { ); } } + settings.getMode().validateMapping(mappingLookup); if (settings.getIndexSortConfig().hasIndexSort() && mappers().nestedLookup() != NestedLookup.EMPTY) { throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); } @@ -104,7 +104,8 @@ public void validate(IndexSettings settings, boolean checkLimits) { mappingLookup.getFieldType(match).validateMatchedRoutingPath(); } for (String objectName : mappingLookup.objectMappers().keySet()) { - if (Regex.simpleMatch(path, objectName)) { + // object type is not allowed in the routing paths + if (path.equals(objectName)) { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. 
[" @@ -117,6 +118,5 @@ public void validate(IndexSettings settings, boolean checkLimits) { if (checkLimits) { this.mappingLookup.checkLimits(settings); } - settings.getMode().validateMapping(mappingLookup); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 67247733e9e4e..9f234598f5e76 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -13,7 +13,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.xcontent.DotExpandingXContentParser; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -89,6 +89,7 @@ protected void addDoc(LuceneDocument doc) { private final Set newFieldsSeen; private final Map dynamicObjectMappers; private final List dynamicRuntimeFields; + private final DocumentDimensions dimensions; private Field version; private SeqNoFieldMapper.SequenceIDFields seqID; @@ -105,6 +106,7 @@ private DocumentParserContext(DocumentParserContext in) { this.dynamicRuntimeFields = in.dynamicRuntimeFields; this.version = in.version; this.seqID = in.seqID; + this.dimensions = in.dimensions; } protected DocumentParserContext( @@ -124,6 +126,7 @@ protected DocumentParserContext( this.newFieldsSeen = new HashSet<>(); this.dynamicObjectMappers = new HashMap<>(); this.dynamicRuntimeFields = new ArrayList<>(); + this.dimensions = indexSettings.getMode().buildDocumentDimensions(); } public final IndexSettings indexSettings() { @@ -334,6 +337,13 @@ public XContentParser parser() { }; } + /** + * The collection of dimensions for this document. 
+ */ + public DocumentDimensions getDimensions() { + return dimensions; + } + public abstract ContentPath path(); public abstract XContentParser parser(); @@ -370,7 +380,7 @@ public final DynamicTemplate findDynamicTemplate(String fieldName, DynamicTempla // XContentParser that wraps an existing parser positioned on a value, // and a field name, and returns a stream that looks like { 'field' : 'value' } - private static class CopyToParser extends FilterXContentParser { + private static class CopyToParser extends FilterXContentParserWrapper { enum State { FIELD, @@ -390,7 +400,7 @@ enum State { public Token nextToken() throws IOException { if (state == State.FIELD) { state = State.VALUE; - return in.currentToken(); + return delegate().currentToken(); } return Token.END_OBJECT; } @@ -400,7 +410,7 @@ public Token currentToken() { if (state == State.FIELD) { return Token.FIELD_NAME; } - return in.currentToken(); + return delegate().currentToken(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 3ce33fb26c4b4..3a001e99236a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -255,7 +255,7 @@ public void parse(DocumentParserContext context) throws IOException { valuePreview ); } - multiFields.parse(this, context); + multiFields.parse(this, context, () -> context); } /** @@ -449,7 +449,7 @@ public final Map indexAnalyzers() { return indexAnalyzers; } - public static class MultiFields implements Iterable, ToXContent { + public static final class MultiFields implements Iterable, ToXContent { private static final MultiFields EMPTY = new MultiFields(Collections.emptyMap()); @@ -507,16 +507,16 @@ private MultiFields(Map mappers) { this.mappers = mappers; } - public void parse(FieldMapper mainField, DocumentParserContext context) throws IOException { + public void parse(FieldMapper mainField, DocumentParserContext context, Supplier multiFieldContextSupplier) + throws IOException { // TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part // of the mappings if (mappers.isEmpty()) { return; } - context.path().add(mainField.simpleName()); for (FieldMapper mapper : mappers.values()) { - mapper.parse(context); + mapper.parse(multiFieldContextSupplier.get()); } context.path().remove(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index af51b42deda80..c67d64bd2048c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -25,6 +25,7 @@ final class FieldTypeLookup { private final Map fullNameToFieldType = new HashMap<>(); private final Map dynamicFieldTypes = new HashMap<>(); + private final Set runtimeFieldNames = new HashSet<>(); /** * A map from field name to all fields whose content has been copied into it @@ -80,6 +81,7 @@ final class FieldTypeLookup { for (MappedFieldType fieldType : RuntimeField.collectFieldTypes(runtimeFields).values()) { // this will override concrete fields with runtime fields that have the same name fullNameToFieldType.put(fieldType.name(), fieldType); + runtimeFieldNames.add(fieldType.name()); } } @@ -104,6 +106,10 @@ MappedFieldType get(String field) { return 
getDynamicField(field); }
+ boolean isRuntimeField(String field) {
+ return runtimeFieldNames.contains(field);
+ }
+
// for testing int getMaxParentPathDots() { return maxParentPathDots;
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index ee2e33c3ed3a8..2a9bde2cca1c7 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java
@@ -42,9 +42,9 @@ import org.elasticsearch.search.lookup.FieldValues; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.runtime.GeoPointScriptFieldDistanceFeatureQuery;
+import org.elasticsearch.xcontent.FilterXContentParserWrapper;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.support.MapXContentParser;
import java.io.IOException; import java.io.UncheckedIOException;
@@ -215,7 +215,38 @@ protected void index(DocumentParserContext context, GeoPoint geometry) throws IO context.doc().add(new StoredField(fieldType().name(), geometry.toString())); } // TODO phase out geohash (which is currently used in the CompletionSuggester)
- multiFields.parse(this, context.switchParser(MapXContentParser.wrapObject(geometry.geohash())));
+ // we only expose the geohash value and disallow advancing tokens, hence we can reuse the same parser throughout multiple sub-fields
+ DocumentParserContext parserContext = context.switchParser(new GeoHashMultiFieldParser(context.parser(), geometry.geohash()));
+ multiFields.parse(this, context, () -> parserContext);
+ }
+
+ /**
+ * Parser that pretends to be the main document parser, but exposes the provided geohash regardless of how the geopoint was provided
+ * in the incoming document. We rely on the fact that consumers only ever call {@link XContentParser#textOrNull()} and never
+ * advance tokens, which is explicitly disallowed by this parser.
+ */ + static class GeoHashMultiFieldParser extends FilterXContentParserWrapper { + private final String value; + + GeoHashMultiFieldParser(XContentParser innerParser, String value) { + super(innerParser); + this.value = value; + } + + @Override + public String textOrNull() throws IOException { + return value; + } + + @Override + public Token currentToken() { + return Token.VALUE_STRING; + } + + @Override + public Token nextToken() throws IOException { + throw new UnsupportedOperationException(); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 9efbbe27ec8dd..ab1b580d7a5be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -18,11 +18,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -492,12 +490,7 @@ private static InetAddress value(XContentParser parser, InetAddress nullValue) t private void indexValue(DocumentParserContext context, InetAddress address) { if (dimension) { - // Encode the tsid part of the dimension field if the _tsid field is enabled. - // If the _tsid field is not enabled, we can skip the encoding part. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? 
TimeSeriesIdFieldMapper.encodeTsidValue(NetworkAddress.format(address))
- : null;
- context.doc().addDimensionBytes(fieldType().name(), bytes);
+ context.getDimensions().addIp(fieldType().name(), address);
} if (indexed) { Field field = new InetAddressPoint(fieldType().name(), address);
@@ -527,4 +520,13 @@ protected void indexScriptValues( public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).dimension(dimension).init(this); }
+
+ @Override
+ public void doValidate(MappingLookup lookup) {
+ if (dimension && null != lookup.nestedLookup().getNestedParent(name())) {
+ throw new IllegalArgumentException(
+ TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]"
+ );
+ }
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index e667428c28722..b3835364a9e36 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java
@@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations;
-import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries;
@@ -62,6 +61,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection; import java.util.Collections; import java.util.List;
@@ -70,6 +70,8 @@ import java.util.Objects; import java.util.function.Supplier;
+import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
+
/** * A field mapper for keywords. This mapper accepts strings and indexes them as-is. */
@@ -901,16 +903,32 @@ private void indexValue(DocumentParserContext context, String value) { value = normalizeValue(fieldType().normalizer(), name(), value); if (dimension) {
- // Encode the tsid part of the dimension field. Although, it would seem reasonable
- // to skip the encode part if we don't generate a _tsid field (as we do with number
- // and ip fields), we keep this test because we must ensure that the value of this
- // dimension field is not larger than TimeSeriesIdFieldMapper.DIMENSION_VALUE_LIMIT
- BytesReference bytes = TimeSeriesIdFieldMapper.encodeTsidValue(value);
- context.doc().addDimensionBytes(fieldType().name(), bytes);
+ context.getDimensions().addString(fieldType().name(), value);
} // convert to utf8 only once before feeding postings/dv/stored fields final BytesRef binaryValue = new BytesRef(value);
+
+ // If the UTF8 encoding of the field value is bigger than the max length 32766, Lucene will fail the indexing request and, to roll
+ // back the changes, will mark the (possibly partially indexed) document as deleted. This results in deletes, even in an append-only
+ // workload, which in turn leads to slower merges, as these will potentially have to fall back to MergeStrategy.DOC instead of
+ // MergeStrategy.BULK. To avoid this, we do a preflight check here before indexing the document into Lucene.
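+ // Background: BYTE_BLOCK_SIZE is imported above from Lucene's ByteBlockPool and is 32768, so the largest indexable
+ // term is BYTE_BLOCK_SIZE - 2 = 32766 bytes; the two reserved bytes hold the term's length prefix inside the block
+ // pool, which is where the "max length 32766" in the comment above comes from.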
+ if (binaryValue.length > BYTE_BLOCK_SIZE - 2) { + byte[] prefix = new byte[30]; + System.arraycopy(binaryValue.bytes, binaryValue.offset, prefix, 0, 30); + String msg = "Document contains at least one immense term in field=\"" + + fieldType().name() + + "\" (whose " + + "UTF8 encoding is longer than the max length " + + (BYTE_BLOCK_SIZE - 2) + + "), all of which were " + + "skipped. Please correct the analyzer to not produce such terms. The prefix of the first immense " + + "term is: '" + + Arrays.toString(prefix) + + "...'"; + throw new IllegalArgumentException(msg); + } + if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { Field field = new KeywordField(fieldType().name(), binaryValue, fieldType); context.doc().add(field); @@ -961,4 +979,13 @@ protected String contentType() { public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName(), indexAnalyzers, scriptCompiler).dimension(dimension).init(this); } + + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java index 3cb2b030ebeff..22b5d8bfc8ffa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java @@ -10,16 +10,12 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesReference; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; /** * Fork of {@link org.apache.lucene.document.Document} with additional functionality. @@ -31,12 +27,6 @@ public class LuceneDocument implements Iterable { private final String prefix; private final List fields; private Map keyedFields; - /** - * A sorted map of the serialized values of dimension fields that will be used - * for generating the _tsid field. The map will be used by {@link TimeSeriesIdFieldMapper} - * to build the _tsid field for the document. - */ - private SortedMap dimensionBytes; LuceneDocument(String path, LuceneDocument parent) { fields = new ArrayList<>(); @@ -109,27 +99,6 @@ public IndexableField getByKey(Object key) { return keyedFields == null ? null : keyedFields.get(key); } - /** - * Add the serialized byte reference for a dimension field. This will be used by {@link TimeSeriesIdFieldMapper} - * to build the _tsid field for the document. - */ - public void addDimensionBytes(String fieldName, BytesReference tsidBytes) { - if (dimensionBytes == null) { - // It is a {@link TreeMap} so that it is order by field name. 
- dimensionBytes = new TreeMap<>(); - } else if (dimensionBytes.containsKey(fieldName)) { - throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); - } - dimensionBytes.put(fieldName, tsidBytes); - } - - public SortedMap getDimensionBytes() { - if (dimensionBytes == null) { - return Collections.emptySortedMap(); - } - return dimensionBytes; - } - public IndexableField[] getFields(String name) { List f = new ArrayList<>(); for (IndexableField field : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8d5ff6df45b00..d2615641d468f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -510,6 +510,10 @@ public boolean isMetadataField(String field) { return mapperRegistry.getMetadataMapperParsers(indexVersionCreated).containsKey(field); } + public boolean isMultiField(String field) { + return mappingLookup().isMultiField(field); + } + public synchronized List reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException { logger.info("reloading search analyzers"); // refresh indexAnalyzers and search analyzers diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 5ce6b05556459..e35b283af0197 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -336,6 +336,9 @@ public NestedLookup nestedLookup() { } public boolean isMultiField(String field) { + if (fieldTypeLookup.isRuntimeField(field)) { + return false; + } String sourceParent = parentObject(field); return sourceParent != null && fieldMappers.containsKey(sourceParent); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index f21b697dba0da..bc6e9fb1f1ef6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -1476,14 +1475,7 @@ private static Number value(XContentParser parser, NumberType numberType, Number private void indexValue(DocumentParserContext context, Number numericValue) { if (dimension && numericValue != null) { - // Dimension can only be one of byte, short, int, long. So, we encode the tsid - // part of the dimension field by using the long value. - // Also, there is no point in encoding the tsid value if we do not generate - // the _tsid field. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? 
TimeSeriesIdFieldMapper.encodeTsidValue(numericValue.longValue()) - : null; - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addLong(fieldType().name(), numericValue.longValue()); } List fields = fieldType().type.createFields(fieldType().name(), numericValue, indexed, hasDocValues, stored); context.doc().addAll(fields); @@ -1509,4 +1501,13 @@ public FieldMapper.Builder getMergeBuilder() { .metric(metricType) .init(this); } + + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 160b3732d03d3..136a297d8b1f5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -29,12 +29,15 @@ import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.net.InetAddress; import java.time.ZoneId; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.SortedMap; +import java.util.TreeMap; import java.util.function.Supplier; /** @@ -140,25 +143,8 @@ private TimeSeriesIdFieldMapper() { public void postParse(DocumentParserContext context) throws IOException { assert fieldType().isIndexed() == false; - // SortedMap is expected to be sorted by key (field name) - SortedMap dimensionFields = context.doc().getDimensionBytes(); - BytesReference timeSeriesId = buildTsidField(dimensionFields); - context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId.toBytesRef())); - } - - public static BytesReference buildTsidField(SortedMap dimensionFields) throws IOException { - if (dimensionFields == null || dimensionFields.isEmpty()) { - throw new IllegalArgumentException("Dimension fields are missing."); - } - - try (BytesStreamOutput out = new BytesStreamOutput()) { - encodeTsid(out, dimensionFields); - BytesReference timeSeriesId = out.bytes(); - if (timeSeriesId.length() > LIMIT) { - throw new IllegalArgumentException(NAME + " longer than [" + LIMIT + "] bytes [" + timeSeriesId.length() + "]."); - } - return timeSeriesId; - } + TimeSeriesIdBuilder timeSeriesIdBuilder = (TimeSeriesIdBuilder) context.getDimensions(); + context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesIdBuilder.build().toBytesRef())); } @Override @@ -166,24 +152,6 @@ protected String contentType() { return CONTENT_TYPE; } - public static void encodeTsid(StreamOutput out, SortedMap dimensionFields) throws IOException { - out.writeVInt(dimensionFields.size()); - for (Map.Entry entry : dimensionFields.entrySet()) { - String fieldName = entry.getKey(); - BytesRef 
            fieldNameBytes = new BytesRef(fieldName); - int len = fieldNameBytes.length; - if (len > DIMENSION_NAME_LIMIT) { - throw new IllegalArgumentException( - "Dimension name must be less than [" + DIMENSION_NAME_LIMIT + "] bytes but [" + fieldName + "] was [" + len + "]." - ); - } - // Write field name in utf-8 instead of writeString's utf-16-ish thing - out.writeBytesRef(fieldNameBytes); - entry.getValue().writeTo(out); - } - - } - /** * Decode the {@code _tsid} into a human readable map. */ @@ -193,7 +161,7 @@ public static Map<String, Object> decodeTsid(StreamInput in) { Map<String, Object> result = new LinkedHashMap<>(size); for (int i = 0; i < size; i++) { - String name = in.readString(); + String name = in.readBytesRef().utf8ToString(); int type = in.read(); switch (type) { @@ -214,52 +182,107 @@ public static Map<String, Object> decodeTsid(StreamInput in) { } } - public static Map<String, Object> decodeTsid(BytesRef bytesRef) { - try (StreamInput input = new BytesArray(bytesRef).streamInput()) { - return decodeTsid(input); - } catch (IOException ex) { - throw new IllegalArgumentException("Dimension field cannot be deserialized.", ex); + public static class TimeSeriesIdBuilder implements DocumentDimensions { + /** + * A sorted map of the serialized values of dimension fields that will be used + * for generating the _tsid field. The map will be used by {@link TimeSeriesIdFieldMapper} + * to build the _tsid field for the document. + */ + private final SortedMap<BytesRef, BytesReference> dimensions = new TreeMap<>(); + + public BytesReference build() throws IOException { + if (dimensions.isEmpty()) { + throw new IllegalArgumentException("Dimension fields are missing."); + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(dimensions.size()); + for (Map.Entry<BytesRef, BytesReference> entry : dimensions.entrySet()) { + BytesRef fieldName = entry.getKey(); + if (fieldName.length > DIMENSION_NAME_LIMIT) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Dimension name must be less than [%d] bytes but [%s] was [%s].", + DIMENSION_NAME_LIMIT, + fieldName.utf8ToString(), + fieldName.length + ) + ); + } + out.writeBytesRef(fieldName); + entry.getValue().writeTo(out); + } + BytesReference timeSeriesId = out.bytes(); + if (timeSeriesId.length() > LIMIT) { + throw new IllegalArgumentException(NAME + " longer than [" + LIMIT + "] bytes [" + timeSeriesId.length() + "]."); + } + return timeSeriesId; + } } - } - public static BytesReference encodeTsidValue(String value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 's'); - /* - * Write in utf8 instead of StreamOutput#writeString which is utf-16-ish - * so its easier for folks to reason about the space taken up. Mostly - * it'll be smaller too. - */ - BytesRef bytes = new BytesRef(value); - if (bytes.length > DIMENSION_VALUE_LIMIT) { - throw new IllegalArgumentException( - "Dimension fields must be less than [" + DIMENSION_VALUE_LIMIT + "] bytes but was [" + bytes.length + "]." - ); + @Override + public void addString(String fieldName, String value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 's'); + /* + * Write in utf8 instead of StreamOutput#writeString which is utf-16-ish + * so it's easier for folks to reason about the space taken up. Mostly + * it'll be smaller too. + */ + BytesRef bytes = new BytesRef(value); + if (bytes.length > DIMENSION_VALUE_LIMIT) { + throw new IllegalArgumentException( + "Dimension fields must be less than [" + DIMENSION_VALUE_LIMIT + "] bytes but was [" + bytes.length + "]."
            
+ ); + } + out.writeBytesRef(bytes); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); } - out.writeBytesRef(bytes); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); } - } - public static BytesReference encodeTsidValue(long value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 'l'); - out.writeLong(value); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + @Override + public void addIp(String fieldName, InetAddress value) { + addString(fieldName, NetworkAddress.format(value)); + } + + @Override + public void addLong(String fieldName, long value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 'l'); + out.writeLong(value); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + } + } + + @Override + public void addUnsignedLong(String fieldName, long value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 'u'); + out.writeLong(value); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + } + } + + private void add(String fieldName, BytesReference encoded) { + BytesReference old = dimensions.put(new BytesRef(fieldName), encoded); + if (old != null) { + throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); + } } } - public static BytesReference encodeTsidUnsignedLongValue(long value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 'u'); - out.writeLong(value); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + public static Map decodeTsid(BytesRef bytesRef) { + try (StreamInput input = new BytesArray(bytesRef).streamInput()) { + return decodeTsid(input); + } catch (IOException ex) { + throw new IllegalArgumentException("Dimension field cannot be deserialized.", ex); } } } diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index fb45597fb4771..f1a31db6125d2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -78,13 +78,30 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; - protected void printBoostAndQueryName(XContentBuilder builder) throws IOException { + /** + * Add {@code boost} and {@code query_name} to the builder. + * @deprecated use {@link #boostAndQueryNameToXContent} + */ + @Deprecated + protected final void printBoostAndQueryName(XContentBuilder builder) throws IOException { builder.field(BOOST_FIELD.getPreferredName(), boost); if (queryName != null) { builder.field(NAME_FIELD.getPreferredName(), queryName); } } + /** + * Add {@code boost} and {@code query_name} to the builder. 
+ */ + protected final void boostAndQueryNameToXContent(XContentBuilder builder) throws IOException { + if (boost != DEFAULT_BOOST) { + builder.field(BOOST_FIELD.getPreferredName(), boost); + } + if (queryName != null) { + builder.field(NAME_FIELD.getPreferredName(), queryName); + } + } + @Override public final Query toQuery(SearchExecutionContext context) throws IOException { Query query = doToQuery(context); diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index ccf33a2818d6a..0f8630ccbb030 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -328,15 +328,21 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.startObject(fieldName); builder.field(QUERY_FIELD.getPreferredName(), value); - builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString()); + if (operator != DEFAULT_OPERATOR) { + builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString()); + } if (analyzer != null) { builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); } if (fuzziness != null) { fuzziness.toXContent(builder, params); } - builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); - builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions); + if (prefixLength != FuzzyQuery.defaultPrefixLength) { + builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); + } + if (maxExpansions != FuzzyQuery.defaultMaxExpansions) { + builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions); + } if (minimumShouldMatch != null) { builder.field(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch); } @@ -344,11 +350,19 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.field(FUZZY_REWRITE_FIELD.getPreferredName(), fuzzyRewrite); } // LUCENE 4 UPGRADE we need to document this & test this - builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); - builder.field(LENIENT_FIELD.getPreferredName(), lenient); - builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString()); - builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); - printBoostAndQueryName(builder); + if (fuzzyTranspositions != FuzzyQuery.defaultTranspositions) { + builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); + } + if (lenient != MatchQueryParser.DEFAULT_LENIENCY) { + builder.field(LENIENT_FIELD.getPreferredName(), lenient); + } + if (false == zeroTermsQuery.equals(MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY)) { + builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString()); + } + if (autoGenerateSynonymsPhraseQuery == false) { + builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); + } + boostAndQueryNameToXContent(builder); builder.endObject(); builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 457f7160306eb..ee584dd9acb38 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -374,6 +374,10 @@ public boolean 
isMetadataField(String field) { return mapperService.isMetadataField(field); } + public boolean isMultiField(String field) { + return mapperService.isMultiField(field); + } + public Set sourcePath(String fullName) { return mappingLookup.sourcePaths(fullName); } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 5aabb13e957e4..28b6452acc5ed 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -52,6 +52,7 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; @@ -759,25 +760,17 @@ public String toString() { */ public static final class MetadataSnapshot implements Iterable, Writeable { private final Map metadata; - - public static final MetadataSnapshot EMPTY = new MetadataSnapshot(); - private final Map commitUserData; - private final long numDocs; + public static final MetadataSnapshot EMPTY = new MetadataSnapshot(emptyMap(), emptyMap(), 0L); + public MetadataSnapshot(Map metadata, Map commitUserData, long numDocs) { this.metadata = metadata; this.commitUserData = commitUserData; this.numDocs = numDocs; } - MetadataSnapshot() { - metadata = emptyMap(); - commitUserData = emptyMap(); - numDocs = 0; - } - MetadataSnapshot(IndexCommit commit, Directory directory, Logger logger) throws IOException { LoadedMetadata loadedMetadata = loadMetadata(commit, directory, logger); metadata = loadedMetadata.fileMetadata; @@ -786,26 +779,21 @@ public MetadataSnapshot(Map metadata, Map metadata = new HashMap<>(); - for (int i = 0; i < size; i++) { - StoreFileMetadata meta = new StoreFileMetadata(in); - metadata.put(meta.name(), meta); - } - Map commitUserData = new HashMap<>(); - int num = in.readVInt(); - for (int i = num; i > 0; i--) { - commitUserData.put(in.readString(), in.readString()); + public static MetadataSnapshot readFrom(StreamInput in) throws IOException { + final int metadataSize = in.readVInt(); + final Map metadata = metadataSize == 0 ? 
            emptyMap() : Maps.newMapWithExpectedSize(metadataSize); + for (int i = 0; i < metadataSize; i++) { + final var storeFileMetadata = new StoreFileMetadata(in); + metadata.put(storeFileMetadata.name(), storeFileMetadata); } + final var commitUserData = in.readMap(StreamInput::readString, StreamInput::readString); + final var numDocs = in.readLong(); - this.metadata = unmodifiableMap(metadata); - this.commitUserData = unmodifiableMap(commitUserData); - this.numDocs = in.readLong(); - assert metadata.isEmpty() || numSegmentFiles() == 1 : "numSegmentFiles: " + numSegmentFiles(); + if (metadataSize == 0 && commitUserData.size() == 0 && numDocs == 0) { + return MetadataSnapshot.EMPTY; + } else { + return new MetadataSnapshot(metadata, commitUserData, numDocs); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java index 49b9563730734..78d763ff224ef 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java @@ -32,6 +32,7 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.xcontent.XContentType; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -97,9 +98,19 @@ public void clusterChanged(ClusterChangedEvent event) { } if (isUpgradeInProgress.compareAndSet(false, true)) { - final List<SystemIndexDescriptor> descriptors = getEligibleDescriptors(state.getMetadata()).stream() - .filter(descriptor -> getUpgradeStatus(state, descriptor) == UpgradeStatus.NEEDS_MAPPINGS_UPDATE) - .collect(Collectors.toList()); + final List<SystemIndexDescriptor> descriptors = new ArrayList<>(); + for (SystemIndexDescriptor systemIndexDescriptor : getEligibleDescriptors(state.getMetadata())) { + UpgradeStatus upgradeStatus; + try { + upgradeStatus = getUpgradeStatus(state, systemIndexDescriptor); + } catch (Exception e) { + logger.warn("Failed to calculate upgrade status: " + e.getMessage(), e); + continue; + } + if (upgradeStatus == UpgradeStatus.NEEDS_MAPPINGS_UPDATE) { + descriptors.add(systemIndexDescriptor); + } + } if (descriptors.isEmpty() == false) { // Use a GroupedActionListener so that we only release the lock once all upgrade attempts have succeeded or failed. @@ -271,14 +282,20 @@ private boolean checkIndexMappingUpToDate(SystemIndexDescriptor descriptor, Inde /** * Fetches the mapping version from an index's mapping's `_meta` info. */ - @SuppressWarnings("unchecked") private Version readMappingVersion(SystemIndexDescriptor descriptor, MappingMetadata mappingMetadata) { final String indexName = descriptor.getPrimaryIndex(); try { + @SuppressWarnings("unchecked") Map<String, Object> meta = (Map<String, Object>) mappingMetadata.sourceAsMap().get("_meta"); if (meta == null) { - logger.warn("Missing _meta field in mapping [{}] of index [{}]", mappingMetadata.type(), indexName); - throw new IllegalStateException("Cannot read version string in index " + indexName); + logger.warn( + "Missing _meta field in mapping [{}] of index [{}], assuming mappings update required", + mappingMetadata.type(), + indexName + ); + // This can happen with old system indices, such as .watches, which were created before we had the convention of + // storing a version under `_meta.` We should just replace the template to be sure.
            
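            For illustration, a minimal sketch of the `_meta` version lookup above, with plain maps and a string sentinel standing in for the real MappingMetadata and Version types (MappingVersionReader and its constants are hypothetical, not ES classes):

            import java.util.Map;

            final class MappingVersionReader {
                // Stand-in for Version.V_EMPTY: "unknown, assume a mappings update is required".
                static final String V_EMPTY = "0.0.0";

                static String readMappingVersion(Map<String, Object> mappingSource, String versionMetaKey) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> meta = (Map<String, Object>) mappingSource.get("_meta");
                    if (meta == null) {
                        // Old system indices (e.g. .watches) may predate the `_meta` convention;
                        // report "empty" so the caller schedules an update instead of failing.
                        return V_EMPTY;
                    }
                    Object rawVersion = meta.get(versionMetaKey);
                    return rawVersion == null ? V_EMPTY : rawVersion.toString();
                }

                public static void main(String[] args) {
                    System.out.println(readMappingVersion(Map.of(), "version")); // 0.0.0
                    System.out.println(readMappingVersion(Map.<String, Object>of("_meta", Map.of("version", "8.2.0")), "version")); // 8.2.0
                }
            }
            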
+ return Version.V_EMPTY; } final Object rawVersion = meta.get(descriptor.getVersionMetaKey()); @@ -289,7 +306,7 @@ private Version readMappingVersion(SystemIndexDescriptor descriptor, MappingMeta } final String versionString = rawVersion != null ? rawVersion.toString() : null; if (versionString == null) { - logger.warn("No value found in mappings for [_meta.{}]", descriptor.getVersionMetaKey()); + logger.warn("No value found in mappings for [_meta.{}], assuming mappings update required", descriptor.getVersionMetaKey()); // If we called `Version.fromString(null)`, it would return `Version.CURRENT` and we wouldn't update the mappings return Version.V_EMPTY; } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java index 2e6ba419752f3..d5997938e715b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java @@ -43,7 +43,7 @@ public RecoveryCleanFilesRequest( super(in); recoveryId = in.readLong(); shardId = new ShardId(in); - snapshotFiles = new Store.MetadataSnapshot(in); + snapshotFiles = Store.MetadataSnapshot.readFrom(in); totalTranslogOps = in.readVInt(); globalCheckpoint = in.readZLong(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java index fd6d287f5f5fa..a19388ca26126 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java @@ -40,7 +40,7 @@ public StartRecoveryRequest(StreamInput in) throws IOException { targetAllocationId = in.readString(); sourceNode = new DiscoveryNode(in); targetNode = new DiscoveryNode(in); - metadataSnapshot = new Store.MetadataSnapshot(in); + metadataSnapshot = Store.MetadataSnapshot.readFrom(in); primaryRelocation = in.readBoolean(); startingSeqNo = in.readLong(); if (in.getVersion().onOrAfter(RecoverySettings.SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_VERSION)) { diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java index d28249eefc716..016841af9601d 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -45,12 +46,13 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyList; + public class TransportNodesListShardStoreMetadata extends TransportNodesAction< TransportNodesListShardStoreMetadata.Request, TransportNodesListShardStoreMetadata.NodesStoreFilesMetadata, @@ -132,7 +134,6 @@ private StoreFilesMetadata 
listStoreMetadata(NodeRequest request) throws IOExcep if (indexShard != null) { try { final StoreFilesMetadata storeFilesMetadata = new StoreFilesMetadata( - shardId, indexShard.snapshotStoreMetadata(), indexShard.getPeerRecoveryRetentionLeases() ); @@ -140,10 +141,10 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep return storeFilesMetadata; } catch (org.apache.lucene.index.IndexNotFoundException e) { logger.trace(new ParameterizedMessage("[{}] node is missing index, responding with empty", shardId), e); - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } catch (IOException e) { logger.warn(new ParameterizedMessage("[{}] can't read metadata from store, responding with empty", shardId), e); - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } } } @@ -166,7 +167,7 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep } final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath); if (shardPath == null) { - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } // note that this may fail if it can't get access to the shard lock. Since we check above there is an active shard, this means: // 1) a shard is being constructed, which means the master will not use a copy of this replica @@ -180,7 +181,7 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep ); // We use peer recovery retention leases from the primary for allocating replicas. We should always have retention leases when // we refresh shard info after the primary has started. Hence, we can ignore retention leases if there is no active shard. 
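            The refactoring here repeatedly swaps freshly-allocated empty responses for the shared StoreFilesMetadata.EMPTY constant. A self-contained sketch of that canonical-empty pattern, with an invented ShardFiles record standing in for the real class:

            import java.util.List;

            final class ShardFilesDemo {
                // Simplified stand-in for StoreFilesMetadata; not the real ES class.
                record ShardFiles(List<String> fileNames) {
                    static final ShardFiles EMPTY = new ShardFiles(List.of());

                    static ShardFiles of(List<String> fileNames) {
                        // Canonicalize empties so callers can use an identity check and no
                        // garbage is produced for the common "shard has nothing" response.
                        return fileNames.isEmpty() ? EMPTY : new ShardFiles(List.copyOf(fileNames));
                    }
                }

                public static void main(String[] args) {
                    System.out.println(ShardFiles.of(List.of()) == ShardFiles.EMPTY); // true
                }
            }
            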
- return new StoreFilesMetadata(shardId, metadataSnapshot, Collections.emptyList()); + return new StoreFilesMetadata(metadataSnapshot, emptyList()); } finally { TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS); if (exists) { @@ -192,37 +193,43 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep } public static class StoreFilesMetadata implements Iterable, Writeable { - private final ShardId shardId; private final Store.MetadataSnapshot metadataSnapshot; private final List peerRecoveryRetentionLeases; - public StoreFilesMetadata( - ShardId shardId, - Store.MetadataSnapshot metadataSnapshot, - List peerRecoveryRetentionLeases - ) { - this.shardId = shardId; + private static final ShardId FAKE_SHARD_ID = new ShardId("_na_", "_na_", 0); + public static final StoreFilesMetadata EMPTY = new StoreFilesMetadata(Store.MetadataSnapshot.EMPTY, emptyList()); + + public StoreFilesMetadata(Store.MetadataSnapshot metadataSnapshot, List peerRecoveryRetentionLeases) { this.metadataSnapshot = metadataSnapshot; this.peerRecoveryRetentionLeases = peerRecoveryRetentionLeases; } - public StoreFilesMetadata(StreamInput in) throws IOException { - this.shardId = new ShardId(in); - this.metadataSnapshot = new Store.MetadataSnapshot(in); - this.peerRecoveryRetentionLeases = in.readList(RetentionLease::new); + public static StoreFilesMetadata readFrom(StreamInput in) throws IOException { + if (in.getVersion().before(Version.V_8_2_0)) { + new ShardId(in); + } + final var metadataSnapshot = Store.MetadataSnapshot.readFrom(in); + final var peerRecoveryRetentionLeases = in.readList(RetentionLease::new); + if (metadataSnapshot == Store.MetadataSnapshot.EMPTY && peerRecoveryRetentionLeases.isEmpty()) { + return EMPTY; + } else { + return new StoreFilesMetadata(metadataSnapshot, peerRecoveryRetentionLeases); + } } @Override public void writeTo(StreamOutput out) throws IOException { - shardId.writeTo(out); + if (out.getVersion().before(Version.V_8_2_0)) { + // no compatible version cares about the shard ID, we can just make one up + FAKE_SHARD_ID.writeTo(out); + + // NB only checked this for versions back to 7.17.0, we are assuming that we don't use this with earlier versions: + assert out.getVersion().onOrAfter(Version.V_7_17_0) : out.getVersion(); + } metadataSnapshot.writeTo(out); out.writeList(peerRecoveryRetentionLeases); } - public ShardId shardId() { - return this.shardId; - } - public boolean isEmpty() { return metadataSnapshot.size() == 0; } @@ -267,8 +274,6 @@ public String syncId() { @Override public String toString() { return "StoreFilesMetadata{" - + ", shardId=" - + shardId + ", metadataSnapshot{size=" + metadataSnapshot.size() + ", syncId=" @@ -385,7 +390,7 @@ public static class NodeStoreFilesMetadata extends BaseNodeResponse { public NodeStoreFilesMetadata(StreamInput in, DiscoveryNode node) throws IOException { super(in, node); - storeFilesMetadata = new StoreFilesMetadata(in); + storeFilesMetadata = StoreFilesMetadata.readFrom(in); } public NodeStoreFilesMetadata(DiscoveryNode node, StoreFilesMetadata storeFilesMetadata) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index bc2505b88323e..61c9921d74b7d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -55,7 +55,8 @@ public final class IngestDocument { private final Set executedPipelines 
= new LinkedHashSet<>(); public IngestDocument(String index, String id, String routing, Long version, VersionType versionType, Map source) { - this.sourceAndMetadata = new HashMap<>(); + // source + at max 5 extra fields + this.sourceAndMetadata = Maps.newMapWithExpectedSize(source.size() + 5); this.sourceAndMetadata.putAll(source); this.sourceAndMetadata.put(Metadata.INDEX.getFieldName(), index); this.sourceAndMetadata.put(Metadata.ID.getFieldName(), id); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index f20e0da799bfd..e2939d47bcebe 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.coordination.Coordinator; +import org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService; import org.elasticsearch.cluster.desirednodes.DesiredNodesSettingsValidator; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; @@ -95,6 +96,7 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.PersistedClusterStateService; +import org.elasticsearch.health.HealthService; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; @@ -135,6 +137,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; @@ -163,6 +166,7 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.snapshots.InternalSnapshotsInfoService; +import org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService; import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsInfoService; @@ -209,6 +213,7 @@ import javax.net.ssl.SNIHostName; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.common.util.CollectionUtils.concatLists; import static org.elasticsearch.core.Types.forciblyCast; /** @@ -859,7 +864,7 @@ protected Node( metadataCreateIndexService, settingsModule.getIndexScopedSettings() ); - final List> builtinTaskExecutors = Arrays.asList(systemIndexMigrationExecutor); + final List> builtinTaskExecutors = List.of(systemIndexMigrationExecutor); final List> pluginTaskExectors = pluginsService.filterPlugins(PersistentTaskPlugin.class) .stream() .map( @@ -873,10 +878,9 @@ protected Node( ) .flatMap(List::stream) .collect(toList()); - final List> allTasksExectors = Stream.of(pluginTaskExectors, builtinTaskExecutors) - .flatMap(List::stream) - .collect(toList()); - final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(allTasksExectors); + final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry( + concatLists(pluginTaskExectors, builtinTaskExecutors) + ); final PersistentTasksClusterService 
persistentTasksClusterService = new PersistentTasksClusterService( settings, registry, @@ -895,6 +899,8 @@ protected Node( clusterService.getClusterSettings() ); + HealthService healthService = createHealthService(clusterService); + modules.add(b -> { b.bind(Node.class).toInstance(this); b.bind(NodeService.class).toInstance(nodeService); @@ -975,6 +981,7 @@ protected Node( b.bind(ExecutorSelector.class).toInstance(executorSelector); b.bind(IndexSettingProviders.class).toInstance(indexSettingProviders); b.bind(DesiredNodesSettingsValidator.class).toInstance(desiredNodesSettingsValidator); + b.bind(HealthService.class).toInstance(healthService); }); injector = modules.createInjector(); @@ -1025,6 +1032,18 @@ protected Node( } } + private HealthService createHealthService(ClusterService clusterService) { + var serverHealthIndicatorServices = List.of( + new InstanceHasMasterHealthIndicatorService(clusterService), + new RepositoryIntegrityHealthIndicatorService(clusterService) + ); + var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) + .stream() + .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) + .toList(); + return new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + } + private RecoveryPlannerService getRecoveryPlannerService( ThreadPool threadPool, ClusterService clusterService, @@ -1045,8 +1064,7 @@ private RecoveryPlannerService getRecoveryPlannerService( threadPool, clusterService ); - final RecoveryPlannerPlugin recoveryPlannerPlugin = recoveryPlannerPlugins.get(0); - return recoveryPlannerPlugin.createRecoveryPlannerService(shardSnapshotsService); + return recoveryPlannerPlugins.get(0).createRecoveryPlannerService(shardSnapshotsService); } protected TransportService newTransportService( diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java b/server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java similarity index 52% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java rename to server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java index a9e21874313ad..c1b035ad50e0a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java +++ b/server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java @@ -5,17 +5,17 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -package org.elasticsearch.client.ml.inference.preprocessing; -import org.elasticsearch.client.ml.inference.NamedXContentObject; +package org.elasticsearch.plugins; + +import org.elasticsearch.health.HealthIndicatorService; + +import java.util.Collection; /** - * Describes a pre-processor for a defined machine learning model + * An additional extension point for {@link Plugin}s that extends Elasticsearch's health indicators functionality. */ -public interface PreProcessor extends NamedXContentObject { +public interface HealthPlugin { - /** - * @return The name of the pre-processor - */ - String getName(); + Collection getHealthIndicatorServices(); } diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index e67809e2ece93..77f1c36ac1368 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -48,6 +48,7 @@ *
  • {@link AnalysisPlugin} *
  • {@link ClusterPlugin} *
  • {@link DiscoveryPlugin} + *
            • {@link HealthPlugin} (see the sketch below) *
            
  • {@link IngestPlugin} *
  • {@link MapperPlugin} *
            • {@link NetworkPlugin} diff --git a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java index 41e0e9b3704cb..476baf1c28f63 100644 --- a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.plugins; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -17,6 +18,7 @@ import java.util.Collections; import java.util.Map; +import java.util.function.Consumer; /** * An extension point for {@link Plugin} implementations to add custom snapshot repositories. @@ -59,4 +61,13 @@ default Map<String, Repository.Factory> getInternalRepositories( return Collections.emptyMap(); } + /** + * Returns a check that is run on restore. This allows plugins to prevent certain restores from happening. + * + * Returns {@code null} if no check is provided. + */ + default Consumer<IndexMetadata> addPreRestoreCheck() { + return null; + } + } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index 89992b20fe96f..21de3f2f961c6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -8,6 +8,7 @@ package org.elasticsearch.repositories; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -18,10 +19,12 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; /** * Sets up classes for Snapshot/Restore.
            
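            With the new HealthPlugin extension point in place, a plugin opts in by implementing the single getHealthIndicatorServices method; Node#createHealthService then concatenates these with the built-in indicators. A hedged wiring sketch (the plugin class name is invented, and a real implementation would return its own HealthIndicatorService instances rather than an empty list):

            import org.elasticsearch.health.HealthIndicatorService;
            import org.elasticsearch.plugins.HealthPlugin;
            import org.elasticsearch.plugins.Plugin;

            import java.util.Collection;
            import java.util.List;

            // Hypothetical example plugin; only the HealthPlugin interface and Plugin base
            // class are taken from this change.
            public class ExampleHealthPlugin extends Plugin implements HealthPlugin {
                @Override
                public Collection<HealthIndicatorService> getHealthIndicatorServices() {
                    // Return the plugin's own indicator services here; Node#createHealthService
                    // merges them with the built-in instance-has-master and
                    // repository-integrity indicators.
                    return List.of();
                }
            }
            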
@@ -80,6 +83,14 @@ public RepositoriesModule( } } + List> preRestoreChecks = new ArrayList<>(); + for (RepositoryPlugin repoPlugin : repoPlugins) { + Consumer preRestoreCheck = repoPlugin.addPreRestoreCheck(); + if (preRestoreCheck != null) { + preRestoreChecks.add(preRestoreCheck); + } + } + Settings settings = env.settings(); Map repositoryTypes = Collections.unmodifiableMap(factories); Map internalRepositoryTypes = Collections.unmodifiableMap(internalFactories); @@ -89,7 +100,8 @@ public RepositoriesModule( transportService, repositoryTypes, internalRepositoryTypes, - transportService.getThreadPool() + transportService.getThreadPool(), + preRestoreChecks ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 6d1bcd0a131cd..6b837f20eb045 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -56,6 +56,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -97,13 +98,16 @@ public class RepositoriesService extends AbstractLifecycleComponent implements C private volatile Map repositories = Collections.emptyMap(); private final RepositoriesStatsArchive repositoriesStatsArchive; + private final List> preRestoreChecks; + public RepositoriesService( Settings settings, ClusterService clusterService, TransportService transportService, Map typesRegistry, Map internalTypesRegistry, - ThreadPool threadPool + ThreadPool threadPool, + List> preRestoreChecks ) { this.typesRegistry = typesRegistry; this.internalTypesRegistry = internalTypesRegistry; @@ -122,6 +126,7 @@ public RepositoriesService( REPOSITORIES_STATS_ARCHIVE_MAX_ARCHIVED_STATS.get(settings), threadPool::relativeTimeInMillis ); + this.preRestoreChecks = preRestoreChecks; } /** @@ -776,6 +781,10 @@ private static RepositoryConflictException newRepositoryConflictException(String ); } + public List> getPreRestoreChecks() { + return preRestoreChecks; + } + @Override protected void doStart() { diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 28e1897a0272d..b80a0124bc5d9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -403,6 +403,7 @@ protected BlobStoreRepository( this.namedXContentRegistry = namedXContentRegistry; this.basePath = basePath; this.maxSnapshotCount = MAX_SNAPSHOTS_SETTING.get(metadata.settings()); + this.repoDataDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); } @Override @@ -1866,7 +1867,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) * {@link #bestEffortConsistency} must be {@code false}, in which case we can assume that the {@link RepositoryData} loaded is * unique for a given value of {@link #metadata} at any point in time. 
*/ - private final ResultDeduplicator repoDataDeduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator repoDataDeduplicator; private void doGetRepositoryData(ActionListener listener) { // Retry loading RepositoryData in a loop in case we run into concurrent modifications of the repository. diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java index eb98197793017..f39b3d3a3479c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java @@ -50,6 +50,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC fieldRequest.indicesOptions(IndicesOptions.fromRequest(request, fieldRequest.indicesOptions())); fieldRequest.includeUnmapped(request.paramAsBoolean("include_unmapped", false)); + fieldRequest.filters(request.paramAsStringArray("filters", Strings.EMPTY_ARRAY)); + fieldRequest.allowedTypes(request.paramAsStringArray("types", Strings.EMPTY_ARRAY)); request.withContentOrSourceParamParserOrNull(parser -> { if (parser != null) { PARSER.parse(parser, fieldRequest, null); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 204aa3b6f002f..e6a85f30b76aa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -49,7 +49,6 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static java.util.Arrays.asList; import static org.elasticsearch.action.support.master.MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; @@ -229,10 +228,10 @@ public void onResponse(final Collection responses) { .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); ClusterStateResponse stateResponse = extractResponse(responses, ClusterStateResponse.class); - Map indicesStates = StreamSupport.stream( - stateResponse.getState().getMetadata().spliterator(), - false - ).collect(Collectors.toMap(indexMetadata -> indexMetadata.getIndex().getName(), Function.identity())); + Map indicesStates = stateResponse.getState() + .getMetadata() + .stream() + .collect(Collectors.toMap(indexMetadata -> indexMetadata.getIndex().getName(), Function.identity())); ClusterHealthResponse healthResponse = extractResponse(responses, ClusterHealthResponse.class); Map indicesHealths = healthResponse.getIndices(); diff --git a/server/src/main/java/org/elasticsearch/rollup/RollupV2.java b/server/src/main/java/org/elasticsearch/rollup/RollupV2.java index 9c9566452c116..65775dcc58e0a 100644 --- a/server/src/main/java/org/elasticsearch/rollup/RollupV2.java +++ b/server/src/main/java/org/elasticsearch/rollup/RollupV2.java @@ -8,8 +8,11 @@ package org.elasticsearch.rollup; +import org.elasticsearch.Build; + public class RollupV2 { - public static final boolean ROLLUP_V2_FEATURE_FLAG_ENABLED = "true".equals(System.getProperty("es.rollup_v2_feature_flag_enabled")); + public static final boolean ROLLUP_V2_FEATURE_FLAG_ENABLED = Build.CURRENT.isSnapshot() + || "true".equals(System.getProperty("es.rollup_v2_feature_flag_enabled")); public static boolean isEnabled() { return ROLLUP_V2_FEATURE_FLAG_ENABLED; diff --git 
a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 1a41a4398536d..488910a9f700d 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -23,6 +22,7 @@ import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; import java.io.IOException; @@ -38,8 +38,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.SortedMap; -import java.util.TreeMap; import java.util.function.LongSupplier; /** A formatter for values as returned by the fielddata/doc-values APIs. */ @@ -706,38 +704,34 @@ public BytesRef parseBytesRef(Object value) { } Map m = (Map) value; - SortedMap dimensionFields = new TreeMap<>(); + TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder(); for (Map.Entry entry : m.entrySet()) { - String k = (String) entry.getKey(); + String f = entry.getKey().toString(); Object v = entry.getValue(); - BytesReference bytes; if (v instanceof String s) { - bytes = TimeSeriesIdFieldMapper.encodeTsidValue(s); + builder.addString(f, s); } else if (v instanceof Long || v instanceof Integer) { Long l = Long.valueOf(v.toString()); // For a long encoded number, we must check if the number can be the encoded value // of an unsigned_long. 
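            Together with the mapper changes earlier in this patch, the parse path that follows is just a client of TimeSeriesIdBuilder. A short usage sketch; field names and values are invented, and decodeTsid's raw Map is assumed to be Map<String, Object>:

            import org.apache.lucene.util.BytesRef;
            import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
            import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder;

            import java.io.IOException;
            import java.util.Map;

            class TsidRoundTrip {
                public static void main(String[] args) throws IOException {
                    TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder();
                    builder.addString("host", "web-01"); // serialized with a leading 's' type byte
                    builder.addLong("shard", 3L);        // serialized with a leading 'l' type byte
                    BytesRef tsid = builder.build().toBytesRef(); // build() enforces the name/value/total size limits
                    // Keys come back in byte order because the builder keeps a TreeMap.
                    Map<String, Object> decoded = TimeSeriesIdFieldMapper.decodeTsid(tsid);
                    System.out.println(decoded); // e.g. {host=web-01, shard=3}
                }
            }
            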
            Number ul = (Number) UNSIGNED_LONG_SHIFTED.format(l); if (l == ul) { - bytes = TimeSeriesIdFieldMapper.encodeTsidValue(l); + builder.addLong(f, l); } else { long ll = UNSIGNED_LONG_SHIFTED.parseLong(String.valueOf(l), false, () -> 0L); - bytes = TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(ll); + builder.addUnsignedLong(f, ll); } } else if (v instanceof BigInteger ul) { long ll = UNSIGNED_LONG_SHIFTED.parseLong(ul.toString(), false, () -> 0L); - bytes = TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(ll); + builder.addUnsignedLong(f, ll); } else { throw new IllegalArgumentException("Unexpected value in tsid object [" + v + "]"); } - - assert bytes != null : "Could not parse fields in _tsid field [" + value + "]."; - dimensionFields.put(k, bytes); } try { - return TimeSeriesIdFieldMapper.buildTsidField(dimensionFields).toBytesRef(); + return builder.build().toBytesRef(); } catch (IOException e) { throw new IllegalArgumentException(e); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 8181d387a0dbb..77638ac0766f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentParser; @@ -185,6 +186,19 @@ public static final class CommonFields extends ParseField.CommonFields { public static final ParseField VALUE_TYPE = new ParseField("value_type"); } + /** + * Does this aggregation support running within a sampling context. + * + * By default, it's false for all aggregations. + * + * If the sub-classed builder supports sampling, be sure that the resulting internal aggregation objects + * override the {@link InternalAggregation#finalizeSampling(SamplingContext)} and scale any values that require scaling.
            
+ * @return does this aggregation builder support sampling + */ + public boolean supportsSampling() { + return false; + } + @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index ffcc971eeda7a..ce28ab0499d54 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -8,11 +8,14 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.Collector; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.timeseries.TimeSeriesIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.InternalProfileCollector; +import org.elasticsearch.search.query.QueryPhase; import java.io.IOException; import java.util.ArrayList; @@ -40,7 +43,7 @@ public void preProcess(SearchContext context) { } if (context.aggregations().factories().context() != null && context.aggregations().factories().context().isInSortOrderExecutionRequired()) { - TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher()); + TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher(), getCancellationChecks(context)); try { searcher.search(context.rewrittenQuery(), bucketCollector); } catch (IOException e) { @@ -55,6 +58,36 @@ public void preProcess(SearchContext context) { } } + private List getCancellationChecks(SearchContext context) { + List cancellationChecks = new ArrayList<>(); + if (context.lowLevelCancellation()) { + // This searching doesn't live beyond this phase, so we don't need to remove query cancellation + cancellationChecks.add(() -> { + final SearchShardTask task = context.getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }); + } + + boolean timeoutSet = context.scrollContext() == null + && context.timeout() != null + && context.timeout().equals(SearchService.NO_TIMEOUT) == false; + + if (timeoutSet) { + final long startTime = context.getRelativeTimeInMillis(); + final long timeout = context.timeout().millis(); + final long maxTime = startTime + timeout; + cancellationChecks.add(() -> { + final long time = context.getRelativeTimeInMillis(); + if (time > maxTime) { + throw new QueryPhase.TimeExceededException(); + } + }); + } + return cancellationChecks; + } + public void execute(SearchContext context) { if (context.aggregations() == null) { context.queryResult().aggregations(null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 8df70a847c347..8d38ad08035f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -15,6 +15,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.AggregationPath; +import 
org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -121,7 +122,16 @@ public InternalAggregation reducePipelines( public abstract InternalAggregation reduce(List aggregations, AggregationReduceContext reduceContext); /** - * Signal the framework if the {@linkplain InternalAggregation#reduce(List, ReduceContext)} phase needs to be called + * Called by the parent sampling context. Should only ever be called once as some aggregations scale their internal values + * @param samplingContext the current sampling context + * @return new aggregation with the sampling context applied, could be the same aggregation instance if nothing needs to be done + */ + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + throw new UnsupportedOperationException(getWriteableName() + " aggregation [" + getName() + "] does not support sampling"); + } + + /** + * Signal the framework if the {@linkplain InternalAggregation#reduce(List, AggregationReduceContext)} phase needs to be called * when there is only one {@linkplain InternalAggregation}. */ protected abstract boolean mustReduceOnSingleInternalAgg(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java index 85b6ce2c3893d..6644a5cf7fe15 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Strings; @@ -259,9 +258,4 @@ public String toString() { public PipelineAggregationBuilder rewrite(QueryRewriteContext context) throws IOException { return this; } - - @Override - public Version getMinimalSupportedVersion() { - return Version.V_EMPTY; - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index a99c78d7150a9..ad026ea9a194c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -318,15 +318,30 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) ); } + private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) { + return new Bucket( + format, + prototype.key, + prototype.keyed, + prototype.isIpv6, + prototype.prefixLength, + prototype.appendPrefixLength, + docCount, + aggregations + ); + } + @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { assert buckets.size() > 0; List aggregations = new ArrayList<>(buckets.size()); + long docCount = 0; for (InternalIpPrefix.Bucket bucket : buckets) { + docCount += bucket.docCount; aggregations.add(bucket.getAggregations()); } InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - return createBucket(aggs, buckets.get(0)); + return createBucket(buckets.get(0), aggs, docCount); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 4765f00f41fdb..939e664c080e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -70,7 +70,11 @@ private static String generateKey(BytesRef from, BytesRef to, DocValueFormat for } private static Bucket createFromStream(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { - String key = in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() : in.readString(); + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + String key = in.getVersion().equals(Version.V_8_0_0) ? in.readString() + : in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() + : in.readString(); BytesRef from = in.readBoolean() ? in.readBytesRef() : null; BytesRef to = in.readBoolean() ? in.readBytesRef() : null; long docCount = in.readLong(); @@ -81,7 +85,9 @@ private static Bucket createFromStream(StreamInput in, DocValueFormat format, bo @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_17_1)) { + if (out.getVersion().equals(Version.V_8_0_0)) { + out.writeString(key == null ? generateKey(from, to, format) : key); + } else if (out.getVersion().onOrAfter(Version.V_7_17_1)) { out.writeOptionalString(key); } else { out.writeString(key == null ? generateKey(from, to, format) : key); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index 9d93e0bfdafb5..21a7e8a1fc470 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -157,7 +157,11 @@ private static String generateKey(double from, double to, DocValueFormat format) @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_17_1)) { + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + if (out.getVersion().equals(Version.V_8_0_0)) { + out.writeString(key == null ? generateKey(from, to, format) : key); + } else if (out.getVersion().onOrAfter(Version.V_7_17_1)) { out.writeOptionalString(key); } else { out.writeString(key == null ? generateKey(from, to, format) : key); @@ -263,14 +267,28 @@ public InternalRange(StreamInput in) throws IOException { int size = in.readVInt(); List ranges = new ArrayList<>(size); for (int i = 0; i < size; i++) { - String key = in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() : in.readString(); + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + final String key = in.getVersion().equals(Version.V_8_0_0) ? in.readString() + : in.getVersion().onOrAfter(Version.V_7_17_1) ? 
in.readOptionalString() + : in.readString(); double from = in.readDouble(); if (in.getVersion().onOrAfter(Version.V_7_17_0)) { - in.readOptionalDouble(); + final Double originalFrom = in.readOptionalDouble(); + if (originalFrom != null) { + from = originalFrom; + } else { + from = Double.NEGATIVE_INFINITY; + } } double to = in.readDouble(); if (in.getVersion().onOrAfter(Version.V_7_17_0)) { - in.readOptionalDouble(); + final Double originalTo = in.readOptionalDouble(); + if (originalTo != null) { + to = originalTo; + } else { + to = Double.POSITIVE_INFINITY; + } } long docCount = in.readVLong(); InternalAggregations aggregations = InternalAggregations.readFrom(in); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java index 15547dd2a9c8b..326a5fe29d2b2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java @@ -10,23 +10,38 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; public class InternalRandomSampler extends InternalSingleBucketAggregation implements Sampler { public static final String NAME = "mapped_random_sampler"; public static final String PARSER_NAME = "random_sampler"; private final int seed; + private final double probability; - InternalRandomSampler(String name, long docCount, int seed, InternalAggregations subAggregations, Map metadata) { + InternalRandomSampler( + String name, + long docCount, + int seed, + double probability, + InternalAggregations subAggregations, + Map metadata + ) { super(name, docCount, subAggregations, metadata); this.seed = seed; + this.probability = probability; } /** @@ -35,12 +50,14 @@ public class InternalRandomSampler extends InternalSingleBucketAggregation imple public InternalRandomSampler(StreamInput in) throws IOException { super(in); this.seed = in.readInt(); + this.probability = in.readDouble(); } @Override protected void doWriteTo(StreamOutput out) throws IOException { super.doWriteTo(out); out.writeInt(seed); + out.writeDouble(probability); } @Override @@ -55,12 +72,36 @@ public String getType() { @Override protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) { - return new InternalRandomSampler(name, docCount, seed, subAggregations, metadata); + return new InternalRandomSampler(name, docCount, seed, probability, subAggregations, metadata); + } + + @Override + public InternalAggregation reduce(List aggregations, AggregationReduceContext reduceContext) { + long docCount = 0L; + List subAggregationsList = new ArrayList<>(aggregations.size()); + 
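            The final-reduce hook that follows is where sub-aggregation values get scaled back to full-index estimates. A hedged, self-contained sketch of what a finalizeSampling implementation typically does; SimpleCount is an invented stand-in for an InternalAggregation subclass, and the double parameter plays the role of the probability carried by SamplingContext:

            final class SimpleCount {
                private final long sampledDocCount;

                SimpleCount(long sampledDocCount) {
                    this.sampledDocCount = sampledDocCount;
                }

                // Mirrors InternalAggregation#finalizeSampling: scale count-like values by the
                // inverse sampling probability, exactly once. InternalRandomSampler enforces
                // the "once" by calling this only when reduceContext.isFinalReduce() is true.
                SimpleCount finalizeSampling(double samplingProbability) {
                    return new SimpleCount(Math.round(sampledDocCount / samplingProbability));
                }

                public static void main(String[] args) {
                    // A ~1% sample that saw 1_234 docs estimates ~123_400 docs overall.
                    System.out.println(new SimpleCount(1_234).finalizeSampling(0.01).sampledDocCount);
                }
            }
            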
for (InternalAggregation aggregation : aggregations) { + docCount += ((InternalSingleBucketAggregation) aggregation).getDocCount(); + subAggregationsList.add(((InternalSingleBucketAggregation) aggregation).getAggregations()); + } + InternalAggregations aggs = InternalAggregations.reduce(subAggregationsList, reduceContext); + if (reduceContext.isFinalReduce() && aggs != null) { + SamplingContext context = buildContext(); + aggs = InternalAggregations.from( + aggs.asList().stream().map(agg -> ((InternalAggregation) agg).finalizeSampling(context)).collect(Collectors.toList()) + ); + } + + return newAggregation(getName(), docCount, aggs); + } + + public SamplingContext buildContext() { + return new SamplingContext(probability, seed); } @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(RandomSamplerAggregationBuilder.SEED.getPreferredName(), seed); + builder.field(RandomSamplerAggregationBuilder.PROBABILITY.getPreferredName(), probability); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); getAggregations().toXContentInternal(builder, params); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java index f6e7903c52222..fa890c39b61f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java @@ -16,10 +16,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -78,6 +74,10 @@ public RandomSamplerAggregationBuilder(StreamInput in) throws IOException { this.seed = in.readInt(); } + public double getProbability() { + return p; + } + protected RandomSamplerAggregationBuilder( RandomSamplerAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, @@ -118,10 +118,7 @@ protected AggregatorFactory doBuild( } recursivelyCheckSubAggs(subfactoriesBuilder.getAggregatorFactories(), builder -> { // TODO add a method or interface to aggregation builder that defaults to false - if (builder instanceof CardinalityAggregationBuilder - || builder instanceof NestedAggregationBuilder - || builder instanceof SamplerAggregationBuilder - || builder instanceof DiversifiedAggregationBuilder) { + if (builder.supportsSampling() == false) { throw new IllegalArgumentException( "[random_sampler] aggregation [" + getName() @@ -136,6 +133,10 @@ protected AggregatorFactory doBuild( return new RandomSamplerAggregatorFactory(name, seed, p, context, parent, subfactoriesBuilder, metadata); } + public int getSeed() { + return seed; + } + @Override protected XContentBuilder 
internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java index 2c28d96648678..366a06f55ed61 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java @@ -29,11 +29,13 @@ public class RandomSamplerAggregator extends BucketsAggregator implements SingleBucketAggregator { private final int seed; + private final double probability; private final CheckedSupplier weightSupplier; RandomSamplerAggregator( String name, int seed, + double probability, CheckedSupplier weightSupplier, AggregatorFactories factories, AggregationContext context, @@ -43,6 +45,7 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single ) throws IOException { super(name, factories, context, parent, cardinalityUpperBound, metadata); this.seed = seed; + this.probability = probability; if (this.subAggregators().length == 0) { throw new IllegalArgumentException( RandomSamplerAggregationBuilder.NAME + " aggregation [" + name + "] must have sub aggregations configured" @@ -59,6 +62,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I name, bucketDocCount(owningBucketOrd), seed, + probability, subAggregationResults, metadata() ) @@ -67,7 +71,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I @Override public InternalAggregation buildEmptyAggregation() { - return new InternalRandomSampler(name, 0, seed, buildEmptySubAggregations(), metadata()); + return new InternalRandomSampler(name, 0, seed, probability, buildEmptySubAggregations(), metadata()); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java index 0776ad1f91698..1e5990b6bab57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java @@ -44,7 +44,7 @@ public class RandomSamplerAggregatorFactory extends AggregatorFactory { @Override public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { - return new RandomSamplerAggregator(name, seed, this::getWeight, factories, context, parent, cardinality, metadata); + return new RandomSamplerAggregator(name, seed, probability, this::getWeight, factories, context, parent, cardinality, metadata); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java index 15f703c9e71c4..48c6a85bd60cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.DocValueFormat; 
import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -133,6 +134,11 @@ public AbstractInternalHDRPercentiles reduce(List aggregati return createReduced(getName(), keys, merged, keyed, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + protected abstract AbstractInternalHDRPercentiles createReduced( String name, double[] keys, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java index 551e36f057c11..342d6cabedb08 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -116,6 +117,11 @@ public AbstractInternalTDigestPercentiles reduce(List aggre return createReduced(getName(), keys, merged, keyed, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + protected abstract AbstractInternalTDigestPercentiles createReduced( String name, double[] keys, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 1e5a710d3b560..6acab88ca2558 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -157,6 +157,11 @@ public static > ConstructingO } } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDoubleArray(values); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java index b31c9b443bf8b..bbabe974bad20 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java @@ -65,6 +65,11 @@ public AvgAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map metadata) { return new AvgAggregationBuilder(this, factoriesBuilder, metadata); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index 6653151f03590..33cc4052c34e9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -77,6 +77,11 @@ public ExtendedStatsAggregationBuilder(StreamInput in) throws IOException { sigma = in.readDouble(); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public Set metricNames() { return InternalExtendedStats.METRIC_NAMES; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java index 621e6d4d59649..3201426cc4f41 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java @@ -67,6 +67,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new GeoBoundsAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java index a0e62eb3b8fb6..1d5f7fbb761e6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java @@ -77,6 +77,11 @@ public GeoCentroidAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java index 9d74fc0b99612..fd706e92f19a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -88,6 +89,11 @@ public InternalAvg reduce(List aggregations, AggregationRed return new InternalAvg(getName(), kahanSummation.value(), count, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), count != 0 ? 
getValue() : null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index 393149239cbab..ba3c36c7c386e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -273,6 +274,21 @@ public InternalExtendedStats reduce(List aggregations, Aggr ); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalExtendedStats( + name, + samplingContext.inverseScale(count), + samplingContext.inverseScale(sum), + min, + max, + samplingContext.inverseScale(sumOfSqrs), + sigma, + format, + getMetadata() + ); + } + static class Fields { public static final String SUM_OF_SQRS = "sum_of_squares"; public static final String SUM_OF_SQRS_AS_STRING = "sum_of_squares_as_string"; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java index b2d1bb4198c5d..113ac7a562c0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -115,6 +116,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java index 4fc6ddad9a532..d4a3249177d61 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -124,6 +125,11 @@ public InternalGeoCentroid reduce(List aggregations, Aggreg return new InternalGeoCentroid(name, result, totalCount, getMetadata()); } + @Override + public 
InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalGeoCentroid(name, centroid, samplingContext.inverseScale(count), getMetadata()); + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java index 50dde6105b3fc..6dec5d6603795 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,11 @@ public InternalMax reduce(List aggregations, AggregationRed return new InternalMax(name, max, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { boolean hasValue = Double.isInfinite(max) == false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java index 04084bcd435ac..0d5f3b737b9f1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -74,6 +75,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalMedianAbsoluteDeviation(name, metadata, format, valueMerged); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { final boolean anyResults = valuesSketch.size() > 0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java index 768c45902d70d..bb0f0ecd7750a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,11 @@ public InternalMin reduce(List aggregations, AggregationRed return new 
InternalMin(getName(), min, this.format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { boolean hasValue = Double.isInfinite(min) == false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java index 55fc00873de41..7c1c5e5dcafab 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java @@ -16,6 +16,7 @@ import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -120,6 +121,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalScriptedMetric(firstAggregation.getName(), aggregation, firstAggregation.reduceScript, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java index 97298df7b3785..441b06de21371 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -173,6 +174,19 @@ public InternalStats reduce(List aggregations, AggregationR return new InternalStats(name, count, kahanSummation.value(), min, max, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalStats( + name, + samplingContext.inverseScale(count), + samplingContext.inverseScale(sum), + min, + max, + format, + getMetadata() + ); + } + static class Fields { public static final String COUNT = "count"; public static final String MIN = "min"; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 87ab160025ebe..b87df7c87ee27 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import 
org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -169,6 +170,11 @@ public InternalAggregation reduce(List aggregations, Aggreg ); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java index f453243ab29ee..c6ab257ac0fba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -66,6 +67,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalValueCount(name, valueCount, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalValueCount(name, samplingContext.inverseScale(value), getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), value); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java index bc772cf8efd57..261bfc5a47ad8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -90,6 +91,11 @@ public InternalWeightedAvg reduce(List aggregations, Aggreg return new InternalWeightedAvg(getName(), sumCompensation.value(), weightCompensation.value(), format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), weight != 0 ? 
getValue() : null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java index adcd5d85738d6..ccbf7e69c1f8d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java @@ -74,6 +74,11 @@ public MaxAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java index 32af21ba7b881..a63c7bde037a8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java @@ -99,6 +99,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new MedianAbsoluteDeviationAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.NUMERIC; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java index df76bb0fee2f9..c72685d6302ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java @@ -79,6 +79,11 @@ protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected MinAggregatorFactory innerBuild( AggregationContext context, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java index 0895672584da7..daa7259a47fac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java @@ -110,6 +110,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { } } + @Override + public boolean supportsSampling() { + return true; + } + /** * Set the {@code init} script. 
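
The recurring supportsSampling() overrides in the hunks above replace the old hard-coded instanceof blocklist in RandomSamplerAggregationBuilder#doBuild (removed earlier in this patch) with a capability flag that each builder declares for itself. A minimal sketch of the pattern, using hypothetical class names rather than the real Elasticsearch hierarchy:

// Sketch only: hypothetical names, not the Elasticsearch class hierarchy.
abstract class AggBuilderSketch {
    // New capability flag: defaults to false, so a builder must opt in explicitly.
    public boolean supportsSampling() {
        return false;
    }

    public abstract String name();
}

class AvgBuilderSketch extends AggBuilderSketch {
    @Override
    public boolean supportsSampling() {
        return true; // mirrors the overrides added throughout this patch
    }

    @Override
    public String name() {
        return "avg";
    }
}

class RandomSamplerCheckSketch {
    // Replaces the old instanceof blocklist: reject any sub-aggregation that has not opted in.
    static void checkSubAgg(AggBuilderSketch builder) {
        if (builder.supportsSampling() == false) {
            throw new IllegalArgumentException("[random_sampler] does not support sub-aggregation [" + builder.name() + "]");
        }
    }
}

The design benefit is locality: adding sampling support to a new aggregation means overriding one method on its builder instead of editing a central blocklist.
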
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java index 6910e10aa89c6..d3306bf732def 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java @@ -71,6 +71,11 @@ public StatsAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public Set metricNames() { return InternalStats.METRIC_NAMES; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java index 3f9f9b5ca46a8..93557168407f2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -65,6 +66,11 @@ public Sum reduce(List aggregations, AggregationReduceConte return new Sum(name, kahanSummation.value(), format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new Sum(name, samplingContext.inverseScale(sum), format, getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), sum); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java index d9d938a116aaa..d00ce1024a261 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java @@ -62,6 +62,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new SumAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. 
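
Sum#finalizeSampling above scales the sampled sum back up through SamplingContext#inverseScale, while scale-invariant statistics are left untouched. A small self-contained sketch of the arithmetic, assuming inverseScale(v) simply divides by the sampling probability (consistent with how the patch uses it, but not taken verbatim from the SamplingContext source):

// Sketch only: assumes inverseScale(v) == v / probability.
record SamplingContextSketch(double probability) {
    double inverseScale(double value) {
        return value / probability;
    }
}

class InverseScaleDemo {
    public static void main(String[] args) {
        SamplingContextSketch context = new SamplingContextSketch(0.25);
        double sampledSum = 1_000.0; // sum observed over roughly 25% of the documents
        System.out.println(context.inverseScale(sampledSum)); // 4000.0, the full-index estimate
        // Order statistics (min, max, percentiles) are unaffected by uniform sampling,
        // which is why those finalizeSampling overrides simply return `this`.
    }
}
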
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 69bb5d53220f8..1f341e8fb8f76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -596,6 +596,11 @@ public TopHitsAggregationBuilder subAggregations(Builder subFactories) { ); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public BucketCardinality bucketCardinality() { return BucketCardinality.NONE; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java index 9f1e9b628ccbe..f391c808eb2aa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java @@ -70,6 +70,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new ValueCountAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java index 8b559dcc89b97..fb426be5b05b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java @@ -86,6 +86,11 @@ protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map cancellations; - public TimeSeriesIndexSearcher(IndexSearcher searcher) { + public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellations) { this.searcher = searcher; + this.cancellations = cancellations; } public void search(Query query, BucketCollector bucketCollector) throws IOException { + int seen = 0; query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); // Create LeafWalker for each subreader List leafWalkers = new ArrayList<>(); for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafBucketCollector leafCollector = bucketCollector.getLeafCollector(leaf); Scorer scorer = weight.scorer(leaf); if (scorer != null) { @@ -75,6 +83,9 @@ protected boolean lessThan(LeafWalker a, LeafWalker b) { // walkers are ordered by timestamp. 
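
The TimeSeriesIndexSearcher changes above and below thread a list of cancellation checks into the searcher and poll them only once every CHECK_CANCELLED_SCORER_INTERVAL iterations, so cancellation support adds almost no per-document cost. A standalone sketch of that pattern (names and the interval value are illustrative, not the ES constants):

import java.util.List;

// Sketch only: illustrates the periodic-cancellation-check pattern.
class CancellableLoopSketch {
    private static final int CHECK_INTERVAL = 1024; // stand-in for CHECK_CANCELLED_SCORER_INTERVAL

    private final List<Runnable> cancellations;

    CancellableLoopSketch(List<Runnable> cancellations) {
        this.cancellations = cancellations;
    }

    void run(Iterable<Integer> work) {
        int seen = 0;
        for (Integer item : work) {
            // Poll the cancellation checks only every CHECK_INTERVAL iterations so the
            // hot loop stays cheap; each Runnable throws if the task was cancelled.
            if (++seen % CHECK_INTERVAL == 0) {
                cancellations.forEach(Runnable::run);
            }
            process(item);
        }
    }

    private void process(Integer item) {
        // per-document work would happen here
    }
}
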
while (populateQueue(leafWalkers, queue)) { do { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafWalker walker = queue.top(); walker.collectCurrent(); if (walker.nextDoc() == DocIdSetIterator.NO_MORE_DOCS || walker.shouldPop()) { @@ -99,7 +110,7 @@ private boolean populateQueue(List leafWalkers, PriorityQueue queue, BytesRef tsid) return true; } + private void checkCancelled() { + for (Runnable r : cancellations) { + r.run(); + } + } + private static class LeafWalker { private final LeafCollector collector; private final Bits liveDocs; private final DocIdSetIterator iterator; private final SortedDocValues tsids; private final SortedNumericDocValues timestamps; // TODO can we have this just a NumericDocValues? + private final BytesRefBuilder scratch = new BytesRefBuilder(); int docId = -1; int tsidOrd; long timestamp; @@ -168,6 +186,11 @@ int nextDoc() throws IOException { return docId; } + BytesRef getTsid() throws IOException { + scratch.copyBytes(tsids.lookupOrd(tsids.ordValue())); + return scratch.get(); + } + // invalid if the doc is deleted or if it doesn't have a tsid or timestamp entry private boolean isInvalidDoc(int docId) throws IOException { return (liveDocs != null && liveDocs.get(docId) == false) diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java index c80276696c63f..7b3eb0120c271 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.search.lookup; +import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchParseException; @@ -15,6 +16,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.MemoizedSupplier; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; @@ -26,13 +28,14 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Supplier; import static java.util.Collections.emptyMap; public class SourceLookup implements Map { private LeafReader reader; - CheckedBiConsumer fieldReader; + private CheckedBiConsumer fieldReader; private int docId = -1; @@ -104,19 +107,23 @@ public static Map sourceAsMap(BytesReference source) throws Elas } public void setSegmentAndDocument(LeafReaderContext context, int docId) { + // if we are called with the same document, don't invalidate source if (this.reader == context.reader() && this.docId == docId) { - // if we are called with the same document, don't invalidate source return; } + + // only reset reader and fieldReader when reader changes if (this.reader != context.reader()) { this.reader = context.reader(); - // only reset reader and fieldReader when reader changes + + // All the docs to fetch are adjacent but Lucene stored fields are optimized + // for random access and don't optimize for sequential access - except for merging. + // So we do a little hack here and pretend we're going to do merges in order to + // get better sequential access. 
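
The SourceLookup hunk continuing below swaps the eagerly bound getSequentialStoredFieldsReader()::visitDocument method reference for a lazily initialized one, so the sequential reader is only constructed if a document is actually visited. A rough sketch of what a memoizing supplier along the lines of org.elasticsearch.core.MemoizedSupplier presumably does (an approximation, not the actual class):

import java.util.function.Supplier;

// Sketch only: first get() pays the construction cost, later calls reuse the value.
final class MemoizedSupplierSketch<T> implements Supplier<T> {
    private Supplier<T> delegate;
    private T value;

    MemoizedSupplierSketch(Supplier<T> delegate) {
        this.delegate = delegate;
    }

    @Override
    public synchronized T get() {
        if (delegate != null) {
            value = delegate.get(); // invoke the expensive delegate exactly once
            delegate = null;        // release it; subsequent calls return the cache
        }
        return value;
    }
}
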
if (context.reader() instanceof SequentialStoredFieldsLeafReader lf) { - // All the docs to fetch are adjacent but Lucene stored fields are optimized - // for random access and don't optimize for sequential access - except for merging. - // So we do a little hack here and pretend we're going to do merges in order to - // get better sequential access. - fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; + // Avoid eagerly loading the stored fields reader, since this can be expensive + Supplier<StoredFieldsReader> supplier = new MemoizedSupplier<>(lf::getSequentialStoredFieldsReader); + fieldReader = (d, v) -> supplier.get().visitDocument(d, v); } else { fieldReader = context.reader()::document; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 0b72df78a510f..937378719ff81 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -267,5 +267,5 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort return true; } - static class TimeExceededException extends RuntimeException {} + public static class TimeExceededException extends RuntimeException {} } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java new file mode 100644 index 0000000000000..d744dc215b3ac --- /dev/null +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.repositories.RepositoryData; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.Strings.collectionToDelimitedStringWithLimit; +import static org.elasticsearch.common.util.CollectionUtils.limitSize; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; + +/** + * This indicator reports health for snapshot repositories. + * + * The indicator will report a RED status when any of the snapshot repositories is marked as corrupted. + * Data might not be backed up in such cases. + * + * A corrupted repository most likely needs to be manually cleaned and a new snapshot needs to be created from scratch. 
+ */ +public class RepositoryIntegrityHealthIndicatorService implements HealthIndicatorService { + + public static final String NAME = "repository_integrity"; + + private final ClusterService clusterService; + + public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return NAME; + } + + @Override + public String component() { + return SNAPSHOT; + } + + @Override + public HealthIndicatorResult calculate() { + var snapshotMetadata = clusterService.state().metadata().custom(RepositoriesMetadata.TYPE, RepositoriesMetadata.EMPTY); + + if (snapshotMetadata.repositories().isEmpty()) { + return createIndicator(GREEN, "No repositories configured.", HealthIndicatorDetails.EMPTY); + } + + var corrupted = snapshotMetadata.repositories() + .stream() + .filter(repository -> repository.generation() == RepositoryData.CORRUPTED_REPO_GEN) + .map(RepositoryMetadata::name) + .toList(); + + var totalRepositories = snapshotMetadata.repositories().size(); + var corruptedRepositories = corrupted.size(); + + if (corrupted.isEmpty()) { + return createIndicator( + GREEN, + "No corrupted repositories.", + new SimpleHealthIndicatorDetails(Map.of("total_repositories", totalRepositories)) + ); + } + + return createIndicator( + RED, + createCorruptedRepositorySummary(corrupted), + new SimpleHealthIndicatorDetails( + Map.of( + "total_repositories", + totalRepositories, + "corrupted_repositories", + corruptedRepositories, + "corrupted", + limitSize(corrupted, 10) + ) + ) + ); + } + + private static String createCorruptedRepositorySummary(List corrupted) { + var message = new StringBuilder().append("Detected [").append(corrupted.size()).append("] corrupted repositories: "); + collectionToDelimitedStringWithLimit(corrupted, ",", "[", "].", 1024, message); + return message.toString(); + } +} diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index b9e600e228de5..de137cde1f331 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; @@ -978,7 +977,8 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor + "]" ); } - if (skipVersionChecks(repository) == false && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (ALLOW_BWC_INDICES_SETTING.get(repository.settings()) == false + && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { throw new SnapshotRestoreException( new Snapshot(repository.name(), snapshotInfo.snapshotId()), "the snapshot was created with Elasticsearch version [" @@ -1002,19 +1002,6 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor Setting.Property.NodeScope ); - private static boolean skipVersionChecks(RepositoryMetadata repositoryMetadata) { - if (Build.CURRENT.isSnapshot()) { - return ALLOW_BWC_INDICES_SETTING.get(repositoryMetadata.settings()); - } else { - if (ALLOW_BWC_INDICES_SETTING.exists(repositoryMetadata.settings())) { - throw new 
IllegalArgumentException( - "Repository setting [" + ALLOW_BWC_INDICES_SETTING.getKey() + "] only allowed in release builds" - ); - } - return false; - } - } - public static boolean failed(SnapshotInfo snapshot, String index) { for (SnapshotShardFailure failure : snapshot.shardFailures()) { if (index.equals(failure.index())) { @@ -1289,9 +1276,11 @@ public ClusterState execute(ClusterState currentState) { final String localNodeId = clusterService.state().nodes().getLocalNodeId(); for (Map.Entry indexEntry : indicesToRestore.entrySet()) { final IndexId index = indexEntry.getValue(); + final IndexMetadata originalIndexMetadata = metadata.index(index.getName()); + repositoriesService.getPreRestoreChecks().forEach(check -> check.accept(originalIndexMetadata)); IndexMetadata snapshotIndexMetadata = updateIndexSettings( snapshot, - metadata.index(index.getName()), + originalIndexMetadata, request.indexSettings(), request.ignoreIndexSettings() ); @@ -1591,39 +1580,40 @@ private IndexMetadata convertLegacyIndex(IndexMetadata snapshotIndexMetadata, Cl if (snapshotIndexMetadata.getCreationVersion().before(Version.fromString("5.0.0"))) { throw new IllegalArgumentException("can't restore an index created before version 5.0.0"); } + IndexMetadata.Builder convertedIndexMetadata = IndexMetadata.builder(snapshotIndexMetadata); MappingMetadata mappingMetadata = snapshotIndexMetadata.mapping(); - Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); - - // store old mapping under _meta/legacy_mappings - Map legacyMapping = new LinkedHashMap<>(); - boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); - if (sourceOnlySnapshot) { - // actual mapping is under "_meta" (but strip type first) - Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); - if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { - legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + if (mappingMetadata != null) { + Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); + + // store old mapping under _meta/legacy_mappings + Map legacyMapping = new LinkedHashMap<>(); + boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); + if (sourceOnlySnapshot) { + // actual mapping is under "_meta" (but strip type first) + Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); + if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { + legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + } + } else { + legacyMapping.put("legacy_mappings", loadedMappingSource); } - } else { - legacyMapping.put("legacy_mappings", loadedMappingSource); - } - Map newMappingSource = new LinkedHashMap<>(); - newMappingSource.put("_meta", legacyMapping); + Map newMappingSource = new LinkedHashMap<>(); + newMappingSource.put("_meta", legacyMapping); - Map newMapping = new LinkedHashMap<>(); - newMapping.put(mappingMetadata.type(), newMappingSource); + Map newMapping = new LinkedHashMap<>(); + newMapping.put(mappingMetadata.type(), newMappingSource); + + convertedIndexMetadata.putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)); + } + + convertedIndexMetadata.settings( + Settings.builder() + .put(snapshotIndexMetadata.getSettings()) + .put(IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), clusterState.getNodes().getSmallestNonClientNodeVersion()) + ); // TODO: _routing? 
Perhaps we don't need to obey any routing here as stuff is read-only anyway and get API will be disabled - return IndexMetadata.builder(snapshotIndexMetadata) - .putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)) - .settings( - Settings.builder() - .put(snapshotIndexMetadata.getSettings()) - .put( - IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), - clusterState.getNodes().getSmallestNonClientNodeVersion() - ) - ) - .build(); + return convertedIndexMetadata.build(); } private static IndexMetadata.Builder restoreToCreateNewIndex(IndexMetadata snapshotIndexMetadata, String renamedIndexName) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 646df885cb48c..4223a4239c3a3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -82,8 +82,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements private final Map> shardSnapshots = new HashMap<>(); // A map of snapshots to the shardIds that we already reported to the master as failed - private final ResultDeduplicator remoteFailedRequestDeduplicator = - new ResultDeduplicator<>(); + private final ResultDeduplicator remoteFailedRequestDeduplicator; public SnapshotShardsService( Settings settings, @@ -97,6 +96,7 @@ public SnapshotShardsService( this.transportService = transportService; this.clusterService = clusterService; this.threadPool = transportService.getThreadPool(); + this.remoteFailedRequestDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); if (DiscoveryNode.canContainData(settings)) { // this is only useful on the nodes that can hold data clusterService.addListener(this); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 77885fb624e4c..70da66185f370 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -2288,7 +2289,8 @@ private static boolean isWritingToRepository(SnapshotsInProgress.Entry entry) { } private void addDeleteListener(String deleteUUID, ActionListener listener) { - snapshotDeletionListeners.computeIfAbsent(deleteUUID, k -> new CopyOnWriteArrayList<>()).add(listener); + snapshotDeletionListeners.computeIfAbsent(deleteUUID, k -> new CopyOnWriteArrayList<>()) + .add(ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext())); } /** @@ -2957,7 +2959,8 @@ static Map filterDataStreamAliases( * @param listener listener */ private void addListener(Snapshot snapshot, ActionListener> listener) { - snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new CopyOnWriteArrayList<>()).add(listener); + snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new 
CopyOnWriteArrayList<>()) + .add(ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext())); } @Override @@ -3010,11 +3013,13 @@ public boolean assertAllListenersResolved() { * * Package private to allow for tests. */ - static final ClusterStateTaskExecutor SHARD_STATE_EXECUTOR = ( - currentState, - tasks) -> ClusterStateTaskExecutor.ClusterTasksResult.builder() - .successes(tasks) - .build(new SnapshotShardsUpdateContext(currentState, tasks).computeUpdatedState()); + static final ClusterStateTaskExecutor SHARD_STATE_EXECUTOR = (currentState, tasks) -> { + final var builder = ClusterStateTaskExecutor.ClusterTasksResult.builder(); + for (var task : tasks) { + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); + } + return builder.build(new SnapshotShardsUpdateContext(currentState, tasks).computeUpdatedState()); + }; private static boolean isQueued(@Nullable ShardSnapshotStatus status) { return status != null && status.state() == ShardState.QUEUED; diff --git a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java index b318d485317b7..9010a9d99d3c4 100644 --- a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java +++ b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.Nullable; import java.util.Map; +import java.util.concurrent.ConcurrentLinkedQueue; /** * A task that can be cancelled @@ -20,6 +21,7 @@ public class CancellableTask extends Task { private volatile String reason; private volatile boolean isCancelled; + private final ConcurrentLinkedQueue listeners = new ConcurrentLinkedQueue<>(); public CancellableTask(long id, String type, String action, String description, TaskId parentTaskId, Map headers) { super(id, type, action, description, parentTaskId, headers); @@ -37,6 +39,7 @@ final void cancel(String reason) { this.isCancelled = true; this.reason = reason; } + listeners.forEach(CancellationListener::onCancelled); onCancelled(); } @@ -67,6 +70,20 @@ public final String getReasonCancelled() { return reason; } + /** + * This method adds a listener that needs to be notified if this task is cancelled. + */ + public final void addListener(CancellationListener listener) { + synchronized (this) { + if (this.isCancelled == false) { + listeners.add(listener); + } + } + if (isCancelled) { + listener.onCancelled(); + } + } + /** * Called after the task is cancelled so that it can take any actions that it has to take. */ @@ -103,4 +120,11 @@ private TaskCancelledException getTaskCancelledException() { assert reason != null; return new TaskCancelledException("task cancelled [" + reason + ']'); } + + /** + * This interface is implemented by any class that needs to react to the cancellation of this task. 
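
The new listener hook on CancellableTask guarantees a notification in both orderings: a listener registered before cancellation is invoked from cancel(), and one registered after the task is already cancelled is invoked immediately from addListener(). A hedged usage sketch against the API added in this hunk (constructor arguments are placeholders for illustration):

import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.TaskId;

import java.util.Map;

class CancellationListenerDemo {
    static CancellableTask register() {
        // Placeholder id/type/action/description values.
        CancellableTask task = new CancellableTask(42L, "demo", "demo-action", "a demo task", TaskId.EMPTY_TASK_ID, Map.of());
        // CancellationListener has a single onCancelled() method, so a lambda works.
        task.addListener(() -> System.out.println("task was cancelled"));
        return task;
    }
}
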
+ */ + public interface CancellationListener { + void onCancelled(); + } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java index bd6078ec558e5..cd5bbd56a315a 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java @@ -44,11 +44,12 @@ public class TaskCancellationService { private static final Logger logger = LogManager.getLogger(TaskCancellationService.class); private final TransportService transportService; private final TaskManager taskManager; - private final ResultDeduplicator deduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator deduplicator; public TaskCancellationService(TransportService transportService) { this.transportService = transportService; this.taskManager = transportService.getTaskManager(); + this.deduplicator = new ResultDeduplicator<>(transportService.getThreadPool().getThreadContext()); transportService.registerRequestHandler( BAN_PARENT_ACTION_NAME, ThreadPool.Names.SAME, diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index ef04ad960e607..82677663b01c0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -187,6 +187,19 @@ protected NodeResponse nodeOperation(CancellableNodeRequest request, Task task) } } + /** + * Simulates a cancellation listener and sets a flag to true if the task was cancelled + */ + static class CancellableTestCancellationListener implements CancellableTask.CancellationListener { + + final AtomicBoolean calledUponCancellation = new AtomicBoolean(false); + + @Override + public void onCancelled() { + calledUponCancellation.set(true); + } + } + private Task startCancellableTestNodesAction( boolean waitForActionToStart, int runNodesCount, @@ -252,6 +265,7 @@ public void testBasicTaskCancellation() throws Exception { setupTestNodes(Settings.EMPTY); connectNodes(testNodes); CountDownLatch responseLatch = new CountDownLatch(1); + AtomicBoolean listenerCalledUponCancellation = new AtomicBoolean(false); boolean waitForActionToStart = randomBoolean(); logger.info("waitForActionToStart is set to {}", waitForActionToStart); final AtomicReference responseReference = new AtomicReference<>(); @@ -260,24 +274,23 @@ public void testBasicTaskCancellation() throws Exception { // Block at least 1 node, otherwise it's quite easy to end up in a race condition where the node tasks // have finished before the cancel request has arrived int blockedNodesCount = randomIntBetween(1, runNodesCount); - Task mainTask = startCancellableTestNodesAction( - waitForActionToStart, - runNodesCount, - blockedNodesCount, - new ActionListener() { - @Override - public void onResponse(NodesResponse listTasksResponse) { - responseReference.set(listTasksResponse); - responseLatch.countDown(); - } + Task mainTask = startCancellableTestNodesAction(waitForActionToStart, runNodesCount, blockedNodesCount, new ActionListener<>() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + responseLatch.countDown(); + } - @Override - public void onFailure(Exception e) { - 
throwableReference.set(e); - responseLatch.countDown(); - } + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + responseLatch.countDown(); } - ); + }); + + assert mainTask instanceof CancellableTask; + CancellableTestCancellationListener listenerAddedBeforeCancellation = new CancellableTestCancellationListener(); + ((CancellableTask) mainTask).addListener(listenerAddedBeforeCancellation); // Cancel main task CancelTasksRequest request = new CancelTasksRequest(); @@ -311,6 +324,13 @@ public void onFailure(Exception e) { for (TaskInfo taskInfo : response.getTasks()) { assertTrue(taskInfo.cancellable()); } + + CancellableTestCancellationListener listenerAddedAfterCancellation = new CancellableTestCancellationListener(); + ((CancellableTask) mainTask).addListener(listenerAddedAfterCancellation); + + // Verify both cancellation listeners have been notified + assertTrue(listenerAddedBeforeCancellation.calledUponCancellation.get()); + assertTrue(listenerAddedAfterCancellation.calledUponCancellation.get()); } // Make sure that tasks are no longer running @@ -337,7 +357,7 @@ public void testChildTasksCancellation() throws Exception { final AtomicReference throwableReference = new AtomicReference<>(); int runNodesCount = randomIntBetween(1, nodesCount); int blockedNodesCount = randomIntBetween(0, runNodesCount); - Task mainTask = startCancellableTestNodesAction(true, runNodesCount, blockedNodesCount, new ActionListener() { + Task mainTask = startCancellableTestNodesAction(true, runNodesCount, blockedNodesCount, new ActionListener<>() { @Override public void onResponse(NodesResponse listTasksResponse) { responseReference.set(listTasksResponse); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java new file mode 100644 index 0000000000000..d0ca8a7bf5d33 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + + package org.elasticsearch.action.fieldcaps; + + import org.elasticsearch.common.Strings; + import org.elasticsearch.index.mapper.MapperService; + import org.elasticsearch.index.mapper.MapperServiceTestCase; + import org.elasticsearch.index.query.SearchExecutionContext; + + import java.io.IOException; + import java.util.Map; + import java.util.function.Predicate; + + public class FieldCapabilitiesFilterTests extends MapperServiceTestCase { + + public void testExcludeNestedFields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { + "type" : "nested", + "properties" : { + "field3" : { "type" : "keyword" } + } + }, + "field4" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "-nested" }, + Strings.EMPTY_ARRAY, + f -> true + ); + + assertNotNull(response.get("field1")); + assertNotNull(response.get("field4")); + assertNull(response.get("field2")); + assertNull(response.get("field2.field3")); + } + + public void testMetadataFilters() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + { + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "+metadata" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.get("_index")); + assertNull(response.get("field1")); + } + { + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNull(response.get("_index")); + assertNotNull(response.get("field1")); + } + } + + public void testExcludeMultifields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { + "type" : "text", + "fields" : { + "keyword" : { "type" : "keyword" } + } + }, + "field2" : { "type" : "keyword" } + }, + "runtime" : { + "field2.keyword" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "-multifield" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.get("field1")); + assertNull(response.get("field1.keyword")); + assertNotNull(response.get("field2")); + assertNotNull(response.get("field2.keyword")); + assertNotNull(response.get("_index")); + } + + public void testDontIncludeParentInfo() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "keyword" } + } + } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "-parent" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.get("parent.field1")); + assertNotNull(response.get("parent.field2")); + assertNull(response.get("parent")); + } + + public void testSecurityFilter() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "permitted1" : { "type" : "keyword" }, + "permitted2" : { "type" : "keyword" }, + "forbidden" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + Predicate<String> securityFilter = f -> f.startsWith("permitted"); + + { + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + securityFilter + ); + + assertNotNull(response.get("permitted1")); + assertNull(response.get("forbidden")); + assertNotNull(response.get("_index")); // security filter doesn't apply to metadata + } + + { + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + securityFilter + ); + + assertNotNull(response.get("permitted1")); + assertNull(response.get("forbidden")); + assertNull(response.get("_index")); // -metadata filter applies on top + } + } + + public void testFieldTypeFiltering() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "long" }, + "field3" : { "type" : "text" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + Map<String, IndexFieldCapabilities> response = FieldCapabilitiesFetcher.retrieveFieldCaps( + sec, + new String[] { "*" }, + Strings.EMPTY_ARRAY, + new String[] { "text", "keyword" }, + f -> true + ); + assertNotNull(response.get("field1")); + assertNull(response.get("field2")); + assertNotNull(response.get("field3")); + assertNull(response.get("_index")); + } +}
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java new file mode 100644 index 0000000000000..39715f26d9dc7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + + package org.elasticsearch.action.fieldcaps; + + import org.elasticsearch.common.Randomness; + import org.elasticsearch.common.UUIDs; + import org.elasticsearch.common.io.stream.BytesStreamOutput; + import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; + import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.test.ESTestCase; + + import java.util.ArrayList; + import java.util.Collections; + import java.util.Comparator; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.function.Supplier; + import java.util.stream.Collectors; + import java.util.stream.IntStream; + + import static org.hamcrest.Matchers.equalTo; + + public class FieldCapabilitiesIndexResponseTests extends ESTestCase { + + public void testShareResponsesUsingMappingHash() throws Exception { + final Supplier<Map<String, IndexFieldCapabilities>> randomFieldCaps = () -> { + final Map<String, IndexFieldCapabilities> fieldCaps = new HashMap<>(); + final List<String> fields = randomList(1, 5, () -> randomAlphaOfLength(5)); + for (String field : fields) { + final IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + field, + randomAlphaOfLengthBetween(5, 20), + randomBoolean(), + randomBoolean(), + randomBoolean(), + false, + null, + Map.of() + ); + fieldCaps.put(field, fieldCap); + } + return fieldCaps; + }; + final List<FieldCapabilitiesIndexResponse> inList = new ArrayList<>(); + int numGroups = randomIntBetween(0, 20); + Map<String, List<String>> mappingHashToIndices = new HashMap<>(); + for (int i = 0; i < numGroups; i++) { + String groupName = "group_" + i; + String hashing = UUIDs.randomBase64UUID(); + List<String> indices = IntStream.range(0, randomIntBetween(1, 5)).mapToObj(n -> groupName + "_" + n).toList(); + mappingHashToIndices.put(hashing, indices); + Map<String, IndexFieldCapabilities> fieldCaps = randomFieldCaps.get(); + for (String index : indices) { + inList.add(new FieldCapabilitiesIndexResponse(index, hashing, fieldCaps, true)); + } + } + int numUngroups = randomIntBetween(0, 5); + for (int i = 0; i < numUngroups; i++) { + String index = "ungrouped_" + i; + final String hashing; + final boolean canMatch; + Map<String, IndexFieldCapabilities> fieldCaps = Map.of(); + if (randomBoolean()) { + canMatch = false; + hashing = UUIDs.randomBase64UUID(); + } else { + canMatch = randomBoolean(); + hashing = null; + if (canMatch) { + fieldCaps = randomFieldCaps.get(); + } + } + inList.add(new FieldCapabilitiesIndexResponse(index, hashing, fieldCaps, canMatch)); + } + Randomness.shuffle(inList); + final List<FieldCapabilitiesIndexResponse> serializedList; + try (BytesStreamOutput output = new BytesStreamOutput()) { + FieldCapabilitiesIndexResponse.writeList(output, inList); + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + output.bytes().streamInput(), + new NamedWriteableRegistry(Collections.emptyList()) + ) + ) { + serializedList = FieldCapabilitiesIndexResponse.readList(in); + } + } + assertThat( + serializedList.stream().sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)).toList(), + equalTo(inList.stream().sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)).toList()) + ); + Map<String, List<FieldCapabilitiesIndexResponse>> groupedResponses = serializedList.stream() + .filter(r -> r.canMatch() && r.getIndexMappingHash() != null) + .collect(Collectors.groupingBy(FieldCapabilitiesIndexResponse::getIndexMappingHash)); + assertThat(groupedResponses.keySet(), equalTo(mappingHashToIndices.keySet())); + for (Map.Entry<String, List<FieldCapabilitiesIndexResponse>> e : groupedResponses.entrySet()) { + List<String> indices = mappingHashToIndices.get(e.getKey()); + List<FieldCapabilitiesIndexResponse> rs = e.getValue(); + assertThat(rs.stream().map(FieldCapabilitiesIndexResponse::getIndexName).sorted().toList(), equalTo(indices)); + for (FieldCapabilitiesIndexResponse r : rs) { + assertTrue(r.canMatch()); + assertSame(r.get(), rs.get(0).get()); + } + } + } +}
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java index 8810cdb3a7a25..549bd4e299737 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; @@ -31,6 +32,8 @@ public class FieldCapabilitiesNodeRequestTests extends AbstractWireSerializingTe protected FieldCapabilitiesNodeRequest createTestInstance() { List<ShardId> randomShards = randomShardIds(randomIntBetween(1, 5)); String[] randomFields = randomFields(randomIntBetween(1, 20)); + String[] randomFilter = randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "-nested" }; + String[] randomTypeFilter = randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "keyword" }; OriginalIndices originalIndices = randomOriginalIndices(randomIntBetween(0, 20)); QueryBuilder indexFilter = randomBoolean() ? QueryBuilders.termQuery("field", randomAlphaOfLength(5)) : null; @@ -40,7 +43,16 @@ protected FieldCapabilitiesNodeRequest createTestInstance() { ? Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)) : null; - return new FieldCapabilitiesNodeRequest(randomShards, randomFields, originalIndices, indexFilter, nowInMillis, runtimeFields); + return new FieldCapabilitiesNodeRequest( + randomShards, + randomFields, + randomFilter, + randomTypeFilter, + originalIndices, + indexFilter, + nowInMillis, + runtimeFields + ); } private List<ShardId> randomShardIds(int numShards) { @@ -81,12 +93,14 @@ protected Writeable.Reader<FieldCapabilitiesNodeRequest> instanceReader() { @Override protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeRequest instance) throws IOException { - switch (random().nextInt(5)) { + switch (random().nextInt(7)) { case 0 -> { List<ShardId> shardIds = randomShardIds(instance.shardIds().size() + 1); return new FieldCapabilitiesNodeRequest( shardIds, instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), @@ -98,6 +112,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), fields, + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), @@ -109,6 +125,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), originalIndices, instance.indexFilter(), instance.nowInMillis(), @@ -120,6 +138,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + 
instance.allowedTypes(), instance.originalIndices(), indexFilter, instance.nowInMillis(), @@ -131,6 +151,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), nowInMillis, @@ -144,13 +166,41 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), runtimeFields ); } - default -> throw new IllegalStateException("The test should only allow 5 parameters mutated"); + case 6 -> { + String[] randomFilter = instance.filters().length > 0 ? Strings.EMPTY_ARRAY : new String[] { "-nested" }; + return new FieldCapabilitiesNodeRequest( + instance.shardIds(), + instance.fields(), + randomFilter, + instance.allowedTypes(), + instance.originalIndices(), + instance.indexFilter(), + instance.nowInMillis(), + instance.runtimeFields() + ); + } + case 7 -> { + String[] randomType = instance.allowedTypes().length > 0 ? Strings.EMPTY_ARRAY : new String[] { "text" }; + return new FieldCapabilitiesNodeRequest( + instance.shardIds(), + instance.fields(), + instance.filters(), + randomType, + instance.originalIndices(), + instance.indexFilter(), + instance.nowInMillis(), + instance.runtimeFields() + ); + } + default -> throw new IllegalStateException("The test should only allow 7 parameters mutated"); } } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java index 566fa43a8da62..dc72d180ef0c2 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -146,14 +147,20 @@ public void testValidation() { public void testGetDescription() { final FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); - assertThat(request.getDescription(), equalTo("indices[], fields[]")); + assertThat(request.getDescription(), equalTo("indices[], fields[], filters[], types[]")); request.fields("a", "b"); - assertThat(request.getDescription(), anyOf(equalTo("indices[], fields[a,b]"), equalTo("indices[], fields[b,a]"))); + assertThat( + request.getDescription(), + anyOf(equalTo("indices[], fields[a,b], filters[], types[]"), equalTo("indices[], fields[b,a], filters[], types[]")) + ); request.indices("x", "y", "z"); request.fields("a"); - assertThat(request.getDescription(), equalTo("indices[x,y,z], fields[a]")); + assertThat(request.getDescription(), equalTo("indices[x,y,z], fields[a], filters[], types[]")); + + request.filters("-metadata", "-multifields"); + assertThat(request.getDescription(), endsWith("filters[-metadata,-multifields], types[]")); final String[] lots = new String[between(1024, 2048)]; for (int i = 0; i < lots.length; i++) { @@ -162,6 +169,7 
@@ public void testGetDescription() { request.indices("x", "y", "z"); request.fields(lots); + request.filters(Strings.EMPTY_ARRAY); assertThat( request.getDescription(), allOf( @@ -173,7 +181,7 @@ public void testGetDescription() { ); assertThat( request.getDescription().length(), - lessThanOrEqualTo(1024 + ("indices[x,y,z], fields[" + "s9999,... (9999 in total, 9999 omitted)]").length()) + lessThanOrEqualTo(1024 + ("indices[x,y,z], fields[" + "s9999,... (9999 in total, 9999 omitted)], filters[], types[]").length()) ); request.fields("a"); @@ -185,12 +193,12 @@ public void testGetDescription() { containsString("..."), containsString(lots.length + " in total"), containsString("omitted"), - endsWith("], fields[a]") + endsWith("], fields[a], filters[], types[]") ) ); assertThat( request.getDescription().length(), - lessThanOrEqualTo(1024 + ("indices[" + "s9999,... (9999 in total, 9999 omitted)], fields[a]").length()) + lessThanOrEqualTo(1024 + ("indices[" + "s9999,... (9999 in total, 9999 omitted)], fields[a], filters[], types[]").length()) ); final FieldCapabilitiesRequest randomRequest = createTestInstance();
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index fbea856caface..1f43f88c192ae 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -60,7 +60,7 @@ public static FieldCapabilitiesIndexResponse randomIndexResponse(String index, b for (String field : fields) { responses.put(field, randomFieldCaps(field)); } - return new FieldCapabilitiesIndexResponse(index, responses, canMatch); + return new FieldCapabilitiesIndexResponse(index, null, responses, canMatch); } public static IndexFieldCapabilities randomFieldCaps(String fieldName) {
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java index 92d97085a251b..549c643f5c2bc 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java @@ -9,6 +9,8 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -16,9 +18,15 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.stream.IntStream; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCapabilities> { @@ -158,6 +166,140 @@ public void testBuilder() { } } + public void testRandomBuilder() { + List<String> indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + Set<String> searchableIndices = new HashSet<>(randomSubsetOf(indices)); + Set<String> aggregatableIndices = new HashSet<>(randomSubsetOf(indices)); + Set<String> dimensionIndices = new HashSet<>(randomSubsetOf(indices)); + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add( + index, + randomBoolean(), + searchableIndices.contains(index), + aggregatableIndices.contains(index), + dimensionIndices.contains(index), + null, + Map.of() + ); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + // search + if (searchableIndices.isEmpty()) { + assertFalse(fieldCaps.isSearchable()); + assertNull(fieldCaps.nonSearchableIndices()); + } else if (searchableIndices.size() == indices.size()) { + assertTrue(fieldCaps.isSearchable()); + assertNull(fieldCaps.nonSearchableIndices()); + } else { + assertFalse(fieldCaps.isSearchable()); + assertThat( + Sets.newHashSet(fieldCaps.nonSearchableIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), searchableIndices)) + ); + } + // aggregate + if (aggregatableIndices.isEmpty()) { + assertFalse(fieldCaps.isAggregatable()); + assertNull(fieldCaps.nonAggregatableIndices()); + } else if (aggregatableIndices.size() == indices.size()) { + assertTrue(fieldCaps.isAggregatable()); + assertNull(fieldCaps.nonAggregatableIndices()); + } else { + assertFalse(fieldCaps.isAggregatable()); + assertThat( + Sets.newHashSet(fieldCaps.nonAggregatableIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), aggregatableIndices)) + ); + } + // dimension + if (dimensionIndices.isEmpty()) { + assertFalse(fieldCaps.isDimension()); + assertNull(fieldCaps.nonDimensionIndices()); + } else if (dimensionIndices.size() == indices.size()) { + assertTrue(fieldCaps.isDimension()); + assertNull(fieldCaps.nonDimensionIndices()); + } else { + assertFalse(fieldCaps.isDimension()); + assertThat( + Sets.newHashSet(fieldCaps.nonDimensionIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), dimensionIndices)) + ); + } + } + + public void testBuilderSingleMetricType() { + List<String> indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + TimeSeriesParams.MetricType metric = randomBoolean() ? null : randomFrom(TimeSeriesParams.MetricType.values()); + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add(index, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), metric, Map.of()); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + assertThat(fieldCaps.getMetricType(), equalTo(metric)); + assertNull(fieldCaps.metricConflictsIndices()); + } + + public void testBuilderMixedMetricType() { + List<String> indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + Map<String, TimeSeriesParams.MetricType> metricTypes = new HashMap<>(); + for (String index : indices) { + if (randomBoolean()) { + metricTypes.put(index, randomFrom(TimeSeriesParams.MetricType.values())); + } + } + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add(index, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), metricTypes.get(index), Map.of()); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + if (metricTypes.isEmpty()) { + assertNull(fieldCaps.getMetricType()); + assertNull(fieldCaps.metricConflictsIndices()); + } else if (metricTypes.size() == indices.size() && metricTypes.values().size() == 1) { + assertThat(fieldCaps.getMetricType(), equalTo(Iterables.get(metricTypes.values(), 0))); + assertNull(fieldCaps.metricConflictsIndices()); + } else { + assertNull(fieldCaps.getMetricType()); + assertThat(fieldCaps.metricConflictsIndices(), equalTo(indices.toArray(String[]::new))); + } + } + + public void testOutOfOrderIndices() { + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + int numIndex = randomIntBetween(1, 5); + for (int i = 1; i <= numIndex; i++) { + builder.add( + "index-" + i, + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomFrom(TimeSeriesParams.MetricType.values()), + Map.of() + ); + } + final String outOfOrderIndex = randomBoolean() ? "abc" : "index-" + randomIntBetween(1, numIndex); + AssertionError error = expectThrows(AssertionError.class, () -> { + builder.add( + outOfOrderIndex, + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomFrom(TimeSeriesParams.MetricType.values()), + Map.of() + ); + }); + assertThat(error.getMessage(), containsString("indices aren't sorted")); + } + static FieldCapabilities randomFieldCaps(String fieldName) { String[] indices = null; if (randomBoolean()) {
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java new file mode 100644 index 0000000000000..830f9bb42912b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + + package org.elasticsearch.action.fieldcaps; + + import org.elasticsearch.Version; + import org.elasticsearch.common.Strings; + import org.elasticsearch.test.ESTestCase; + + import java.util.Collections; + import java.util.Map; + + public class ResponseRewriterTests extends ESTestCase { + + public void testExcludeMetadata() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "_index", + fieldCaps("_index", "_index", true) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("_index")); + } + + public void testIncludeOnlyMetadata() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "_index", + fieldCaps("_index", "_index", true) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "+metadata" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertFalse(rewritten.containsKey("field")); + assertTrue(rewritten.containsKey("_index")); + } + + public void testExcludeNested() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "parent", + fieldCaps("parent", "nested", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-nested" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("parent.child")); + assertFalse(rewritten.containsKey("parent")); + } + + public void testExcludeMultifield() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "field", + fieldCaps("field", "text", false), + "field.keyword", + fieldCaps("field.keyword", "keyword", false), + "parent", + fieldCaps("parent", "object", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-multifield" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("field.keyword")); + assertTrue(rewritten.containsKey("parent.child")); + } + + public void testExcludeParents() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "field", + fieldCaps("field", "text", false), + "parent", + fieldCaps("parent", "object", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-parent" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("parent")); + assertTrue(rewritten.containsKey("parent.child")); + } + + public void testAllowedTypes() { + Map<String, IndexFieldCapabilities> oldResponse = Map.of( + "text", + fieldCaps("text", "text", false), + "long", + fieldCaps("long", "long", false), + "keyword", + fieldCaps("keyword", "keyword", false) + ); + + Map<String, IndexFieldCapabilities> rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + Strings.EMPTY_ARRAY, + new String[] { "text", "keyword" }, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("text")); + assertTrue(rewritten.containsKey("keyword")); + assertFalse(rewritten.containsKey("long")); + } + + private static 
IndexFieldCapabilities fieldCaps(String name, String type, boolean isMetadata) { + return new IndexFieldCapabilities(name, type, isMetadata, true, true, false, null, Collections.emptyMap()); + } + +}
diff --git a/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java b/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java new file mode 100644 index 0000000000000..11d2ee1f12a04 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + + package org.elasticsearch.action.support; + + import org.elasticsearch.test.ESTestCase; + + public class NodeResponseTrackerTests extends ESTestCase { + + public void testAllResponsesReceived() throws Exception { + int nodes = randomIntBetween(1, 10); + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(nodes); + for (int i = 0; i < nodes; i++) { + boolean isLast = i == nodes - 1; + assertEquals( + isLast, + intermediateNodeResponses.trackResponseAndCheckIfLast(i, randomBoolean() ? i : new Exception("from node " + i)) + ); + } + + assertFalse(intermediateNodeResponses.responsesDiscarded()); + assertEquals(nodes, intermediateNodeResponses.getExpectedResponseCount()); + for (int i = 0; i < nodes; i++) { + assertNotNull(intermediateNodeResponses.getResponse(i)); + if (intermediateNodeResponses.getResponse(i) instanceof Integer nodeResponse) { + assertEquals(i, nodeResponse.intValue()); + } + } + } + + public void testDiscardingResults() { + int nodes = randomIntBetween(1, 10); + int cancelAt = randomIntBetween(0, Math.max(0, nodes - 2)); + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(nodes); + for (int i = 0; i < nodes; i++) { + if (i == cancelAt) { + intermediateNodeResponses.discardIntermediateResponses(new Exception("simulated")); + } + boolean isLast = i == nodes - 1; + assertEquals( + isLast, + intermediateNodeResponses.trackResponseAndCheckIfLast(i, randomBoolean() ? 
i : new Exception("from node " + i)) + ); + } + + assertTrue(intermediateNodeResponses.responsesDiscarded()); + assertEquals(nodes, intermediateNodeResponses.getExpectedResponseCount()); + expectThrows(NodeResponseTracker.DiscardedResponsesException.class, () -> intermediateNodeResponses.getResponse(0)); + } + + public void testResponseIsRegisteredOnlyOnce() { + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(1); + assertTrue(intermediateNodeResponses.trackResponseAndCheckIfLast(0, "response1")); + expectThrows(AssertionError.class, () -> intermediateNodeResponses.trackResponseAndCheckIfLast(0, "response2")); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 948288fe06281..93defb70ec466 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -537,14 +537,23 @@ public void testResultAggregation() throws ExecutionException, InterruptedExcept public void testNoResultAggregationIfTaskCancelled() { Request request = new Request(new String[] { TEST_INDEX }); PlainActionFuture listener = new PlainActionFuture<>(); - action.new AsyncAction(cancelledTask(), request, listener).start(); + final CancellableTask task = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()); + TransportBroadcastByNodeAction.AsyncAction asyncAction = + action.new AsyncAction(task, request, listener); + asyncAction.start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); - + int cancelAt = randomIntBetween(0, Math.max(0, capturedRequests.size() - 2)); + int i = 0; for (Map.Entry> entry : capturedRequests.entrySet()) { + if (cancelAt == i) { + TaskCancelHelper.cancel(task, "simulated"); + } transport.handleRemoteError(entry.getValue().get(0).requestId(), new ElasticsearchException("simulated")); + i++; } assertTrue(listener.isDone()); + assertTrue(asyncAction.getNodeResponseTracker().responsesDiscarded()); expectThrows(ExecutionException.class, TaskCancelledException.class, listener::get); } diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index ee43aaa5b5e90..def2e4558bd23 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests; import org.elasticsearch.cluster.ClusterName; @@ -47,7 +48,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; import static java.util.Collections.emptyMap; @@ -94,14 +94,14 @@ public void testNodesSelectors() { 
assertEquals(clusterService.state().nodes().resolveNodes(finalNodesIds).length, capturedRequests.size()); } - public void testNewResponseNullArray() { + public void testNewResponseNullArray() throws Exception { TransportNodesAction action = getTestTransportNodesAction(); final PlainActionFuture future = new PlainActionFuture<>(); action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), new TestNodesRequest(), null, future); expectThrows(NullPointerException.class, future::actionGet); } - public void testNewResponse() { + public void testNewResponse() throws Exception { TestTransportNodesAction action = getTestTransportNodesAction(); TestNodesRequest request = new TestNodesRequest(); List expectedNodeResponses = mockList(TestNodeResponse::new, randomIntBetween(0, 2)); @@ -120,10 +120,10 @@ public void testNewResponse() { Collections.shuffle(allResponses, random()); - AtomicReferenceArray atomicArray = new AtomicReferenceArray<>(allResponses.toArray()); + NodeResponseTracker nodeResponseCollector = new NodeResponseTracker(allResponses); final PlainActionFuture future = new PlainActionFuture<>(); - action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), request, atomicArray, future); + action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), request, nodeResponseCollector, future); TestNodesResponse response = future.actionGet(); assertSame(request, response.request); @@ -146,7 +146,7 @@ public void testCustomResolving() throws Exception { assertEquals(clusterService.state().nodes().getDataNodes().size(), capturedRequests.size()); } - public void testTaskCancellationThrowsException() { + public void testTaskCancellation() { TransportNodesAction action = getTestTransportNodesAction(); List nodeIds = new ArrayList<>(); for (DiscoveryNode node : clusterService.state().nodes()) { @@ -156,10 +156,16 @@ public void testTaskCancellationThrowsException() { TestNodesRequest request = new TestNodesRequest(nodeIds.toArray(new String[0])); PlainActionFuture listener = new PlainActionFuture<>(); CancellableTask cancellableTask = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()); - TaskCancelHelper.cancel(cancellableTask, "simulated"); - action.doExecute(cancellableTask, request, listener); + TransportNodesAction.AsyncAction asyncAction = + action.new AsyncAction(cancellableTask, request, listener); + asyncAction.start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); + int cancelAt = randomIntBetween(0, Math.max(0, capturedRequests.values().size() - 2)); + int requestCount = 0; for (List requests : capturedRequests.values()) { + if (requestCount == cancelAt) { + TaskCancelHelper.cancel(cancellableTask, "simulated"); + } for (CapturingTransport.CapturedRequest capturedRequest : requests) { if (randomBoolean()) { transport.handleResponse(capturedRequest.requestId(), new TestNodeResponse(capturedRequest.node())); @@ -167,9 +173,11 @@ public void testTaskCancellationThrowsException() { transport.handleRemoteError(capturedRequest.requestId(), new TaskCancelledException("simulated")); } } + requestCount++; } assertTrue(listener.isDone()); + assertTrue(asyncAction.getNodeResponseTracker().responsesDiscarded()); expectThrows(ExecutionException.class, TaskCancelledException.class, listener::get); } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java 
b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 767b10a2ade73..544be2c438706 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -51,6 +51,8 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCase { + private static final ActionListener NO_OP_TASK_LISTENER = ActionListener.wrap(() -> {}); + private static final String INDEX = "INDEX"; private AllocationService allocationService; private int numberOfReplicas; @@ -136,7 +138,7 @@ ClusterState applyFailedShards(ClusterState currentState, List fail ); } for (FailedShardUpdateTask nonExistentTask : nonExistentTasks) { - taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success())); + taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success(NO_OP_TASK_LISTENER))); } assertTaskResults(taskResultList, result, currentState, false); } @@ -303,7 +305,7 @@ private static void assertTasksSuccessful( boolean clusterStateChanged ) { List> taskResultList = tasks.stream() - .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success())) + .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success(NO_OP_TASK_LISTENER))) .collect(Collectors.toList()); assertTaskResults(taskResultList, result, clusterState, clusterStateChanged); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java index 0fe9752c9979a..b5a0b429b2c9a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java @@ -172,10 +172,10 @@ public void testUpdatesNodeWithNewRoles() throws Exception { .nodes(DiscoveryNodes.builder().add(masterNode).localNodeId(masterNode.getId()).masterNodeId(masterNode.getId()).add(bwcNode)) .build(); - final ClusterStateTaskExecutor.ClusterTasksResult result = joinTaskExecutor.execute( + final ClusterStateTaskExecutor.ClusterTasksResult result = joinTaskExecutor.execute( clusterState, List.of( - new JoinTaskExecutor.Task( + JoinTask.singleNode( actualNode, "test", ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java index 257203b75364a..f06bf27a9c806 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java @@ -205,7 +205,9 @@ public String toString() { leaderChecker.updateLeader(leader2); { checkCount.set(0); - final long maxCheckCount = randomLongBetween(2, 1000); + // run at least leaderCheckRetryCount iterations to ensure at least one success so that we reset the counters and clear out + // anything left over from the previous run + final long maxCheckCount = randomLongBetween(leaderCheckRetryCount, 1000); logger.info("--> checking again that no failure is detected in {} checks", maxCheckCount); while (checkCount.get() < maxCheckCount) { deterministicTaskQueue.runAllRunnableTasks(); diff --git 
a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java index 128035c187089..db1410d2c3c48 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java @@ -248,7 +248,7 @@ public void testOnlyAutoExpandAllocationFilteringAfterAllNodesUpgraded() { // is the // master - state = cluster.addNodes(state, Collections.singletonList(newNode)); + state = cluster.addNode(state, newNode); // use allocation filtering state = cluster.updateSettings( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 472d69b19c619..63b73cbaa77cd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.AbstractSerializingTestCase; @@ -60,7 +61,7 @@ protected DataStream createTestInstance() { public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); @@ -86,13 +87,69 @@ public void testRolloverWithConflictingBackingIndexName() { } final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build()); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + numConflictingIndices + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(ds.getIndexMode())); + } + + public void testRolloverIndexMode() { + IndexMode indexMode = randomBoolean() ? 
IndexMode.STANDARD : null; + DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); + // Ensure index_mode=null + ds = new DataStream( + ds.getName(), + ds.getTimeStampField(), + ds.getIndices(), + ds.getGeneration(), + ds.getMetadata(), + ds.isHidden(), + ds.isReplicated(), + ds.isSystem(), + ds.isAllowCustomRouting(), + indexMode + ); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), IndexMode.TIME_SERIES); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + } + + public void testRolloverIndexMode_keepIndexMode() { + DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); + ds = new DataStream( + ds.getName(), + ds.getTimeStampField(), + ds.getIndices(), + ds.getGeneration(), + ds.getMetadata(), + ds.isHidden(), + ds.isReplicated(), + ds.isSystem(), + ds.isAllowCustomRouting(), + IndexMode.TIME_SERIES + ); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + + IndexMode indexMode = randomBoolean() ? IndexMode.STANDARD : null; + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), indexMode); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.TIME_SERIES)); } public void testRemoveBackingIndex() {
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 939fda916d40b..4db0885f37c0f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -95,6 +95,7 @@ import static org.elasticsearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.indices.ShardLimitValidatorTests.createTestShardLimitService; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -1019,6 +1020,14 @@ public void testClusterStateCreateIndex() { assertThat(updatedClusterState.blocks().getIndexBlockWithId("test", INDEX_READ_ONLY_BLOCK.id()), is(INDEX_READ_ONLY_BLOCK)); assertThat(updatedClusterState.routingTable().index("test"), is(notNullValue())); assertThat(allocationRerouted.get(), is(true)); + + Metadata metadata = updatedClusterState.metadata(); + 
IndexAbstraction alias = metadata.getIndicesLookup().get("alias1"); + assertNotNull(alias); + assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); + Index index = metadata.index("test").getIndex(); + assertThat(alias.getIndices(), contains(index)); + assertThat(metadata.aliasedIndices("alias1"), contains(index)); } public void testClusterStateCreateIndexWithMetadataTransaction() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java index d9fd497cb27e1..d704ef141cc60 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java @@ -37,6 +37,7 @@ import java.util.stream.IntStream; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -143,6 +144,7 @@ public void testDeleteIndexWithAnAlias() { assertNull(after.routingTable().index(index)); assertNull(after.blocks().indices().get(index)); assertNull(after.metadata().getIndicesLookup().get(alias)); + assertThat(after.metadata().aliasedIndices(alias), empty()); } public void testDeleteBackingIndexForDataStream() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index d88f0a8b3530c..da49f58f8a3ae 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -33,6 +33,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -77,6 +78,7 @@ public void testAddAndRemove() { assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); assertThat(alias.getIndices(), contains(after.metadata().index(index).getIndex())); assertAliasesVersionIncreased(index, before, after); + assertThat(after.metadata().aliasedIndices("test"), contains(after.metadata().index(index).getIndex())); // Remove the alias from it while adding another one before = after; @@ -85,17 +87,21 @@ public void testAddAndRemove() { Arrays.asList(new AliasAction.Remove(index, "test", null), new AliasAction.Add(index, "test_2", null, null, null, null, null)) ); assertNull(after.metadata().getIndicesLookup().get("test")); + assertThat(after.metadata().aliasedIndices("test"), empty()); alias = after.metadata().getIndicesLookup().get("test_2"); assertNotNull(alias); assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); assertThat(alias.getIndices(), contains(after.metadata().index(index).getIndex())); assertAliasesVersionIncreased(index, before, after); + assertThat(after.metadata().aliasedIndices("test_2"), contains(after.metadata().index(index).getIndex())); // Now just remove on its own before = after; after = service.applyAliasActions(before, singletonList(new AliasAction.Remove(index, "test_2", 
randomBoolean()))); assertNull(after.metadata().getIndicesLookup().get("test")); + assertThat(after.metadata().aliasedIndices("test"), empty()); assertNull(after.metadata().getIndicesLookup().get("test_2")); + assertThat(after.metadata().aliasedIndices("test_2"), empty()); assertAliasesVersionIncreased(index, before, after); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index aa1b4c4f4ba6b..f12b325c535bd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.coordination.CoordinationMetadata; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; @@ -52,15 +53,15 @@ import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFirstBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createTimestampField; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; -import static org.elasticsearch.cluster.metadata.Metadata.Builder.validateDataStreams; +import static org.elasticsearch.cluster.metadata.Metadata.Builder.assertDataStreams; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -1178,7 +1179,7 @@ public void testBuilderRejectsAliasThatRefersToDataStreamBackingIndex() { .put(idx, false) .put(newInstance(dataStreamName, createTimestampField("@timestamp"), List.of(idx.getIndex()))); - IllegalStateException e = expectThrows(IllegalStateException.class, b::build); + AssertionError e = expectThrows(AssertionError.class, b::build); assertThat(e.getMessage(), containsString("aliases [" + conflictingName + "] cannot refer to backing indices of data streams")); } @@ -1399,7 +1400,7 @@ public void testSerialization() throws IOException { public void testValidateDataStreamsNoConflicts() { Metadata metadata = createIndices(5, 10, "foo-datastream").metadata; // don't expect any exception when validating a system without indices that would conflict with future backing indices - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); + assertDataStreams(metadata.getIndices(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsIgnoresIndicesWithoutCounter() { @@ -1423,7 +1424,7 @@ public void testValidateDataStreamsIgnoresIndicesWithoutCounter() { .build(); // don't expect any exception when validating against non-backing indices that don't conform to the backing indices naming // convention - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); + assertDataStreams(metadata.getIndices(), 
(DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsAllowsNamesThatStartsWithPrefix() { @@ -1437,51 +1438,7 @@ public void testValidateDataStreamsAllowsNamesThatStartsWithPrefix() { .build(); // don't expect any exception when validating against (potentially backing) indices that can't create conflict because of // additional text before number - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); - } - - public void testValidateDataStreamsAllowsPrefixedBackingIndices() { - String dataStreamName = "foo-datastream"; - int generations = 10; - List backingIndices = new ArrayList<>(generations); - for (int i = 1; i <= generations; i++) { - IndexMetadata idx; - if (i % 2 == 0 && i < generations) { - idx = IndexMetadata.builder("shrink-" + DataStream.getDefaultBackingIndexName(dataStreamName, i)) - .settings(ESTestCase.settings(Version.CURRENT).put("index.hidden", true)) - .numberOfShards(1) - .numberOfReplicas(1) - .build(); - } else { - idx = createBackingIndex(dataStreamName, i).build(); - } - backingIndices.add(idx); - } - DataStream dataStream = newInstance( - dataStreamName, - createTimestampField("@timestamp"), - backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()) - ); - - IndexAbstraction.DataStream dataStreamAbstraction = new IndexAbstraction.DataStream(dataStream, List.of()); - // manually building the indices lookup as going through Metadata.Builder#build would trigger the validate method already - SortedMap indicesLookup = new TreeMap<>(); - for (IndexMetadata indexMeta : backingIndices) { - indicesLookup.put(indexMeta.getIndex().getName(), new IndexAbstraction.ConcreteIndex(indexMeta, dataStreamAbstraction)); - } - - for (int i = 1; i <= generations; i++) { - // for the indices that we added in the data stream with a "shrink-" prefix, add the non-prefixed indices to the lookup - if (i % 2 == 0 && i < generations) { - IndexMetadata indexMeta = createBackingIndex(dataStreamName, i).build(); - indicesLookup.put(indexMeta.getIndex().getName(), new IndexAbstraction.ConcreteIndex(indexMeta, dataStreamAbstraction)); - } - } - DataStreamMetadata dataStreamMetadata = new DataStreamMetadata(Map.of(dataStreamName, dataStream), Map.of()); - - // prefixed indices with a lower generation than the data stream's generation are allowed even if the non-prefixed, matching the - // data stream backing indices naming pattern, indices are already in the system - validateDataStreams(indicesLookup, dataStreamMetadata); + assertDataStreams(metadata.getIndices(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsForNullDataStreamMetadata() { @@ -1490,7 +1447,7 @@ public void testValidateDataStreamsForNullDataStreamMetadata() { .build(); try { - validateDataStreams(metadata.getIndicesLookup(), null); + assertDataStreams(metadata.getIndices(), null); } catch (Exception e) { fail("did not expect exception when validating a system without any data streams but got " + e.getMessage()); } @@ -1895,6 +1852,114 @@ public void testReuseIndicesLookup() { } } + public void testAliasedIndices() { + int numAliases = randomIntBetween(32, 64); + int numIndicesPerAlias = randomIntBetween(8, 16); + + Metadata.Builder builder = Metadata.builder(); + for (int i = 0; i < numAliases; i++) { + String aliasName = "alias-" + i; + for (int j = 0; j < numIndicesPerAlias; j++) { + AliasMetadata.Builder alias = new 
AliasMetadata.Builder(aliasName); + if (j == 0) { + alias.writeIndex(true); + } + + String indexName = aliasName + "-" + j; + builder.put( + IndexMetadata.builder(indexName) + .settings(settings(Version.CURRENT)) + .creationDate(randomNonNegativeLong()) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(alias) + ); + } + } + + Metadata metadata = builder.build(); + for (int i = 0; i < numAliases; i++) { + String aliasName = "alias-" + i; + Set result = metadata.aliasedIndices(aliasName); + Index[] expected = IntStream.range(0, numIndicesPerAlias) + .mapToObj(j -> aliasName + "-" + j) + .map(name -> new Index(name, ClusterState.UNKNOWN_UUID)) + .toArray(Index[]::new); + assertThat(result, containsInAnyOrder(expected)); + } + + // Add a new alias and index + builder = Metadata.builder(metadata); + String newAliasName = "alias-new"; + { + builder.put( + IndexMetadata.builder(newAliasName + "-1") + .settings(settings(Version.CURRENT)) + .creationDate(randomNonNegativeLong()) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(new AliasMetadata.Builder(newAliasName).writeIndex(true)) + ); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases + 1)); + assertThat(metadata.aliasedIndices(newAliasName), contains(new Index(newAliasName + "-1", ClusterState.UNKNOWN_UUID))); + + // Remove the new alias/index + builder = Metadata.builder(metadata); + { + builder.remove(newAliasName + "-1"); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases)); + assertThat(metadata.aliasedIndices(newAliasName), empty()); + + // Add a new alias that points to existing indices + builder = Metadata.builder(metadata); + { + IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(metadata.index("alias-1-0")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-2-1")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-3-2")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases + 1)); + assertThat( + metadata.aliasedIndices(newAliasName), + containsInAnyOrder( + new Index("alias-1-0", ClusterState.UNKNOWN_UUID), + new Index("alias-2-1", ClusterState.UNKNOWN_UUID), + new Index("alias-3-2", ClusterState.UNKNOWN_UUID) + ) + ); + + // Remove the new alias that points to existing indices + builder = Metadata.builder(metadata); + { + IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(metadata.index("alias-1-0")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-2-1")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-3-2")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases)); + assertThat(metadata.aliasedIndices(newAliasName), empty()); + } + public static final String SYSTEM_ALIAS_NAME = "system_alias"; public void testHiddenAliasValidation() { diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 
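// Editor's sketch, not the PR's implementation: the aliasedIndices() assertions above rely on
// Metadata keeping a reverse lookup from alias name to the indices carrying that alias. A
// minimal, self-contained model of such a lookup (record and method names are illustrative):
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class AliasLookupSketch {
    record IndexEntry(String indexName, List<String> aliases) {}

    static Map<String, List<String>> aliasedIndices(List<IndexEntry> indices) {
        Map<String, List<String>> lookup = new HashMap<>();
        for (IndexEntry entry : indices) {
            for (String alias : entry.aliases()) {
                // one alias may point at many indices; the map is rebuilt when index metadata changes
                lookup.computeIfAbsent(alias, k -> new ArrayList<>()).add(entry.indexName());
            }
        }
        return lookup;
    }
}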
a0117fb5692b6..ec65219a79b31 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterName; @@ -55,7 +56,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -69,10 +69,11 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; public class MasterServiceTests extends ESTestCase { @@ -259,42 +260,35 @@ public void onAckTimeout() { */ public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); - AtomicBoolean published = new AtomicBoolean(); try (MasterService masterService = createMasterService(true)) { - ClusterStateTaskListener update = new ClusterStateTaskListener() { - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - throw new RuntimeException("testing exception handling"); - } - - @Override - public void onFailure(Exception e) {} - }; masterService.submitStateUpdateTask( "testClusterStateTaskListenerThrowingExceptionIsOkay", - update, + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), new ClusterStateTaskExecutor<>() { @Override - public ClusterTasksResult execute( - ClusterState currentState, - List tasks - ) { - ClusterState newClusterState = ClusterState.builder(currentState).build(); - return ClusterTasksResult.builder().successes(tasks).build(newClusterState); + public ClusterTasksResult execute(ClusterState currentState, List tasks) { + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success( + task, + EXPECT_SUCCESS_LISTENER.delegateFailure( + (delegate, cs) -> { throw new RuntimeException("testing exception handling"); } + ) + ); + } + return builder.build(ClusterState.builder(currentState).build()); } @Override public void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationEvent) { - published.set(true); latch.countDown(); } } ); - latch.await(); - assertTrue(published.get()); + assertTrue(latch.await(10, TimeUnit.SECONDS)); } } @@ -467,17 +461,10 @@ public void onFailure(Exception e) { public void testMultipleSubmissionBatching() throws Exception { - class Task implements ClusterStateTaskListener { - @Override - public void onFailure(Exception e) { - throw new AssertionError(e); - } - } - final int executorCount = between(1, 5); final var executionCountDown = new CountDownLatch(executorCount); - class Executor implements ClusterStateTaskExecutor { + class Executor implements ClusterStateTaskExecutor { final AtomicBoolean executed = new AtomicBoolean(); @@ -488,11 +475,16 @@ public void 
addExpectedTaskCount(int taskCount) { } @Override - public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { + public ClusterTasksResult execute(ClusterState currentState, List tasks) + throws Exception { assertTrue("Should execute all tasks at once", executed.compareAndSet(false, true)); assertThat("Should execute all tasks at once", tasks.size(), equalTo(expectedTaskCount)); executionCountDown.countDown(); - return ClusterTasksResult.builder().successes(tasks).build(currentState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success(task, EXPECT_SUCCESS_LISTENER); + } + return builder.build(currentState); } } @@ -507,12 +499,16 @@ public ClusterTasksResult execute(ClusterState currentState, List ta masterService.submitStateUpdateTask( "block", - new Task(), + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), (currentState, tasks) -> { executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us - return ClusterTasksResult.builder().successes(tasks).build(currentState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success(task, EXPECT_SUCCESS_LISTENER); + } + return builder.build(currentState); } ); @@ -523,14 +519,14 @@ public ClusterTasksResult execute(ClusterState currentState, List ta final var submitThreads = new Thread[between(1, 10)]; for (int i = 0; i < submitThreads.length; i++) { final var executor = randomFrom(executors); - final var tasks = randomList(1, 10, Task::new); - executor.addExpectedTaskCount(tasks.size()); + final var task = new ExpectSuccessTask(); + executor.addExpectedTaskCount(1); submitThreads[i] = new Thread(() -> { try { assertTrue(submissionLatch.await(10, TimeUnit.SECONDS)); - masterService.submitStateUpdateTasks( + masterService.submitStateUpdateTask( Thread.currentThread().getName(), - tasks, + task, ClusterStateTaskConfig.build(randomFrom(Priority.values())), executor ); @@ -602,13 +598,12 @@ public void execute() { @Override public void onFailure(Exception e) { - throw new AssertionError(e); + throw new AssertionError("should not be called", e); } @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - processedStates.incrementAndGet(); - processedStatesLatch.get().countDown(); + throw new AssertionError("should not be called"); } @Override @@ -645,21 +640,13 @@ class TaskExecutor implements ClusterStateTaskExecutor { private final AtomicInteger assigned = new AtomicInteger(); private final AtomicInteger batches = new AtomicInteger(); private final AtomicInteger published = new AtomicInteger(); - private final List> assignments = new ArrayList<>(); + private final List assignments = new ArrayList<>(); @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { - int totalCount = 0; - for (Set group : assignments) { - long count = tasks.stream().filter(group::contains).count(); - assertThat( - "batched set should be executed together or not at all. Expected " + group + "s. 
Executing " + tasks, - count, - anyOf(equalTo(0L), equalTo((long) group.size())) - ); - totalCount += count; + for (Task task : tasks) { + assertThat("All tasks should belong to this executor", assignments, hasItem(task)); } - assertThat("All tasks should belong to this executor", totalCount, equalTo(tasks.size())); tasks.forEach(Task::execute); executed.addAndGet(tasks.size()); ClusterState maybeUpdatedClusterState = currentState; @@ -672,7 +659,23 @@ public ClusterTasksResult execute(ClusterState currentState, List ta equalTo(true) ); } - return ClusterTasksResult.builder().successes(tasks).build(maybeUpdatedClusterState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder = builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + processedStates.incrementAndGet(); + processedStatesLatch.get().countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + }); + } + + return builder.build(maybeUpdatedClusterState); } @Override @@ -688,16 +691,16 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio } // randomly assign tasks to executors - List>> assignments = new ArrayList<>(); + List> assignments = new ArrayList<>(); AtomicInteger totalTasks = new AtomicInteger(); for (int i = 0; i < numberOfThreads; i++) { for (int j = 0; j < taskSubmissionsPerThread; j++) { var executor = randomFrom(executors); - var tasks = Set.copyOf(randomList(1, 3, () -> new Task(totalTasks.getAndIncrement()))); + var task = new Task(totalTasks.getAndIncrement()); - assignments.add(Tuple.tuple(executor, tasks)); - executor.assigned.addAndGet(tasks.size()); - executor.assignments.add(tasks); + assignments.add(Tuple.tuple(executor, task)); + executor.assigned.incrementAndGet(); + executor.assignments.add(task); } } processedStatesLatch.set(new CountDownLatch(totalTasks.get())); @@ -712,24 +715,15 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio barrier.await(); for (int j = 0; j < taskSubmissionsPerThread; j++) { var assignment = assignments.get(index * taskSubmissionsPerThread + j); - var tasks = assignment.v2(); + var task = assignment.v2(); var executor = assignment.v1(); - submittedTasks.addAndGet(tasks.size()); - if (tasks.size() == 1) { - masterService.submitStateUpdateTask( - threadName, - tasks.iterator().next(), - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor - ); - } else { - masterService.submitStateUpdateTasks( - threadName, - tasks, - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor - ); - } + submittedTasks.incrementAndGet(); + masterService.submitStateUpdateTask( + threadName, + task, + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor + ); } barrier.await(); } catch (BrokenBarrierException | InterruptedException e) { @@ -762,41 +756,248 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio } } + public void testTaskFailureNotification() throws Exception { + + final String testContextHeaderName = "test-context-header"; + final ThreadContext threadContext = threadPool.getThreadContext(); + final int taskCount = between(1, 10); + final CountDownLatch taskCountDown = new CountDownLatch(taskCount); + + class Task implements ClusterStateTaskListener { + + private final String expectedHeaderValue; + + Task(String expectedHeaderValue) { + this.expectedHeaderValue = 
expectedHeaderValue; + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + throw new AssertionError("should not complete task"); + } + + @Override + public void onFailure(Exception e) { + assertThat(e, instanceOf(RuntimeException.class)); + assertThat(e.getMessage(), equalTo("simulated")); + assertThat(threadContext.getHeader(testContextHeaderName), equalTo(expectedHeaderValue)); + taskCountDown.countDown(); + } + } + + final ClusterStateTaskExecutor executor = (currentState, tasks) -> { + if (randomBoolean()) { + throw new RuntimeException("simulated"); + } else { + return ClusterTasksResult.builder().failures(tasks, new RuntimeException("simulated")).build(currentState); + } + }; + + final var executionBarrier = new CyclicBarrier(2); + final ClusterStateUpdateTask blockMasterTask = new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked + executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us + return currentState; + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + try (var masterService = createMasterService(true)) { + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + masterService.setClusterStatePublisher( + (clusterStatePublicationEvent, publishListener, ackListener) -> { + throw new AssertionError("should not publish any states"); + } + ); + + for (int i = 0; i < taskCount; i++) { + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final String testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + final var task = new Task(testContextHeaderValue); + final var clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(taskCountDown.await(10, TimeUnit.SECONDS)); + } + } + + public void testTaskNotificationAfterPublication() throws Exception { + + class Task implements ClusterStateTaskListener { + + final ActionListener publishListener; + + Task(ActionListener publishListener) { + this.publishListener = publishListener; + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + throw new AssertionError("should not complete task"); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + } + + final String testContextHeaderName = "test-context-header"; + final ThreadContext threadContext = threadPool.getThreadContext(); + + final ClusterStateTaskExecutor executor = (currentState, tasks) -> { + final ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); + for (Task task : tasks) { + builder.success(task, task.publishListener); + } + return builder.build(ClusterState.builder(currentState).build()); + }; + + final var executionBarrier = new CyclicBarrier(2); + final ClusterStateUpdateTask blockMasterTask = new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + executionBarrier.await(10, 
TimeUnit.SECONDS); // notify test thread that the master service is blocked + executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us + return currentState; + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + try (var masterService = createMasterService(true)) { + + // success case: submit some tasks, possibly in different contexts, and verify that the expected listener is completed + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + final AtomicReference publishedState = new AtomicReference<>(); + masterService.setClusterStatePublisher((clusterStatePublicationEvent, publishListener, ackListener) -> { + assertTrue(publishedState.compareAndSet(null, clusterStatePublicationEvent.getNewState())); + ClusterServiceUtils.setAllElapsedMillis(clusterStatePublicationEvent); + publishListener.onResponse(null); + }); + + int toSubmit = between(1, 10); + final CountDownLatch publishSuccessCountdown = new CountDownLatch(toSubmit); + + for (int i = 0; i < toSubmit; i++) { + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final var testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + final var task = new Task(new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertSame(publishedState.get(), clusterState); + publishSuccessCountdown.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }); + + final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(publishSuccessCountdown.await(10, TimeUnit.SECONDS)); + + // failure case: submit some tasks, possibly in different contexts, and verify that the expected listener is completed + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + final String exceptionMessage = "simulated"; + masterService.setClusterStatePublisher((clusterStatePublicationEvent, publishListener, ackListener) -> { + ClusterServiceUtils.setAllElapsedMillis(clusterStatePublicationEvent); + publishListener.onFailure(new FailedToCommitClusterStateException(exceptionMessage)); + }); + + toSubmit = between(1, 10); + final CountDownLatch publishFailureCountdown = new CountDownLatch(toSubmit); + + for (int i = 0; i < toSubmit; i++) { + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final String testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + final var task = new Task(new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + throw new AssertionError("should not succeed"); + } + + @Override + public void onFailure(Exception e) { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertThat(e, 
instanceOf(FailedToCommitClusterStateException.class)); + assertThat(e.getMessage(), equalTo(exceptionMessage)); + publishFailureCountdown.countDown(); + } + }); + + final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(publishFailureCountdown.await(10, TimeUnit.SECONDS)); + } + } + public void testBlockingCallInClusterStateTaskListenerFails() throws InterruptedException { assumeTrue("assertions must be enabled for this test to work", BaseFuture.class.desiredAssertionStatus()); final CountDownLatch latch = new CountDownLatch(1); final AtomicReference assertionRef = new AtomicReference<>(); try (MasterService masterService = createMasterService(true)) { - ClusterStateTaskListener update = new ClusterStateTaskListener() { - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - BaseFuture future = new BaseFuture() { - }; - try { - if (randomBoolean()) { - future.get(1L, TimeUnit.SECONDS); - } else { - future.get(); - } - } catch (Exception e) { - throw new RuntimeException(e); - } catch (AssertionError e) { - assertionRef.set(e); - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) {} - }; masterService.submitStateUpdateTask( "testBlockingCallInClusterStateTaskListenerFails", - update, + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), (currentState, tasks) -> { - ClusterState newClusterState = ClusterState.builder(currentState).build(); - return ClusterTasksResult.builder().successes(tasks).build(newClusterState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder = builder.success(task, EXPECT_SUCCESS_LISTENER.delegateFailure((delegate, cs) -> { + BaseFuture future = new BaseFuture() { + }; + try { + if (randomBoolean()) { + future.get(1L, TimeUnit.SECONDS); + } else { + future.get(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } catch (AssertionError e) { + assertionRef.set(e); + latch.countDown(); + } + })); + } + return builder.build(ClusterState.builder(currentState).build()); } ); @@ -1298,4 +1499,32 @@ public TimeValue masterNodeTimeout() { }; } + /** + * Listener that asserts it does not fail. + */ + private static final ActionListener EXPECT_SUCCESS_LISTENER = new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) {} + + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + }; + + /** + * Task that asserts it does not fail. 
+ */ + private static class ExpectSuccessTask implements ClusterStateTaskListener { + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + // see parent method javadoc, we use dedicated listeners rather than calling this method + throw new AssertionError("should not be called"); + } + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java index 54894fcd65980..7eeae04ceba9e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java @@ -9,32 +9,25 @@ package org.elasticsearch.cluster.service; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.common.Priority; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -47,7 +40,7 @@ public void setUpBatchingTaskExecutor() throws Exception { taskBatcher = new TestTaskBatcher(logger, threadExecutor); } - class TestTaskBatcher extends TaskBatcher { + static class TestTaskBatcher extends TaskBatcher { TestTaskBatcher(Logger logger, PrioritizedEsThreadPoolExecutor threadExecutor) { super(logger, threadExecutor); @@ -58,20 +51,13 @@ class TestTaskBatcher extends TaskBatcher { protected void run(Object batchingKey, List tasks, String tasksSummary) { List updateTasks = (List) tasks; ((TestExecutor) batchingKey).execute(updateTasks.stream().map(t -> t.task).collect(Collectors.toList())); - updateTasks.forEach(updateTask -> updateTask.listener.processed(updateTask.source)); + updateTasks.forEach(updateTask -> updateTask.listener.processed()); } @Override - protected void onTimeout(List tasks, TimeValue timeout) { + protected void onTimeout(BatchedTask task, TimeValue timeout) { threadPool.generic() - .execute( - () -> tasks.forEach( - task -> ((UpdateTask) task).listener.onFailure( - task.source, - new ProcessClusterEventTimeoutException(timeout, task.source) - ) - ) - ); + .execute(() -> ((UpdateTask) task).listener.onFailure(new ProcessClusterEventTimeoutException(timeout, task.source))); } class UpdateTask extends BatchedTask { @@ -99,20 +85,7 @@ protected void submitTask(String source, TestTask testTask) { } private void submitTask(String source, T task, ClusterStateTaskConfig config, TestExecutor executor, TestListener listener) { - 
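// Editor's sketch (an assumed simplification, not the MasterService API itself): the refactored
// tests above pair each task with its own listener via ClusterTasksResult.builder().success(task,
// listener) instead of overriding clusterStateProcessed. The shape of that pattern:
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class PerTaskListenerSketch {
    interface Listener {
        void onResponse(String publishedState);
        void onFailure(Exception e);
    }

    // a listener that asserts it is never failed, mirroring EXPECT_SUCCESS_LISTENER above
    static final Listener EXPECT_SUCCESS = new Listener() {
        @Override
        public void onResponse(String publishedState) {}

        @Override
        public void onFailure(Exception e) {
            throw new AssertionError("should not be called", e);
        }
    };

    // the executor records one listener per task; all of them fire after publication
    static Map<Object, Listener> execute(List<Object> tasks) {
        Map<Object, Listener> successes = new LinkedHashMap<>();
        for (Object task : tasks) {
            successes.put(task, EXPECT_SUCCESS);
        }
        return successes;
    }

    static void onPublished(Map<Object, Listener> successes, String newState) {
        successes.values().forEach(listener -> listener.onResponse(newState));
    }
}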
submitTasks(source, Collections.singletonMap(task, listener), config, executor); - } - - private void submitTasks( - final String source, - final Map tasks, - final ClusterStateTaskConfig config, - final TestExecutor executor - ) { - List safeTasks = tasks.entrySet() - .stream() - .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), e.getValue(), executor)) - .collect(Collectors.toList()); - taskBatcher.submitTasks(safeTasks, config.timeout()); + taskBatcher.submitTask(taskBatcher.new UpdateTask(config.priority(), source, task, listener, executor), config.timeout()); } @Override @@ -144,7 +117,7 @@ public void execute(List tasks) { TaskExecutor executorB = new TaskExecutor(); final ClusterStateTaskConfig config = ClusterStateTaskConfig.build(Priority.NORMAL); - final TestListener noopListener = (source, e) -> { throw new AssertionError(e); }; + final TestListener noopListener = e -> { throw new AssertionError(e); }; // this blocks the cluster state queue, so we can set it up right submitTask("0", "A0", config, executorA, noopListener); // wait to be processed @@ -196,19 +169,16 @@ public void testTasksAreExecutedInOrder() throws BrokenBarrierException, Interru int tasksSubmittedPerThread = randomIntBetween(2, 1024); - CopyOnWriteArrayList> failures = new CopyOnWriteArrayList<>(); CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); final TestListener listener = new TestListener() { @Override - public void onFailure(String source, Exception e) { - logger.error(() -> new ParameterizedMessage("unexpected failure: [{}]", source), e); - failures.add(new Tuple<>(source, e)); - updateLatch.countDown(); + public void onFailure(Exception e) { + throw new AssertionError(e); } @Override - public void processed(String source) { + public void processed() { updateLatch.countDown(); } }; @@ -242,9 +212,7 @@ public void processed(String source) { // wait for all threads to finish barrier.await(); - updateLatch.await(); - - assertThat(failures, empty()); + assertTrue(updateLatch.await(10, TimeUnit.SECONDS)); for (int i = 0; i < numberOfThreads; i++) { assertEquals(tasksSubmittedPerThread, executors[i].tasks.size()); @@ -255,34 +223,24 @@ public void processed(String source) { } } - public void testSingleBatchSubmission() throws InterruptedException { - Map tasks = new HashMap<>(); - final int numOfTasks = randomInt(10); - final CountDownLatch latch = new CountDownLatch(numOfTasks); - Set usedKeys = new HashSet<>(numOfTasks); - for (int i = 0; i < numOfTasks; i++) { - int key = randomValueOtherThanMany(k -> usedKeys.contains(k), () -> randomInt(1024)); - tasks.put(key, new TestListener() { - @Override - public void processed(String source) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Exception e) { - throw new AssertionError(e); - } - }); - usedKeys.add(key); - } - assert usedKeys.size() == numOfTasks; - + public void testSingleTaskSubmission() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + final Integer task = randomInt(1024); TestExecutor executor = taskList -> { - assertThat(taskList.size(), equalTo(tasks.size())); - assertThat(taskList.stream().collect(Collectors.toSet()), equalTo(tasks.keySet())); + assertThat(taskList.size(), equalTo(1)); + assertThat(taskList.get(0), equalTo(task)); }; - submitTasks("test", tasks, ClusterStateTaskConfig.build(Priority.LANGUID), executor); + submitTask("test", task, ClusterStateTaskConfig.build(randomFrom(Priority.values())), 
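// Editor's sketch of the TaskBatcher change exercised above: with batch submission gone, each
// UpdateTask is submitted on its own and timeouts are delivered per task rather than per list.
// The scheduler and interface here are stand-ins, not the batcher's real types:
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class PerTaskTimeoutSketch {
    interface BatchedTask {
        String source();

        void onTimeoutFailure(Exception e);
    }

    static void scheduleTimeout(ScheduledExecutorService scheduler, BatchedTask task, long millis) {
        // one timeout callback per submitted task, no list bookkeeping needed
        scheduler.schedule(
            () -> task.onTimeoutFailure(new RuntimeException("failed to process [" + task.source() + "] within " + millis + "ms")),
            millis,
            TimeUnit.MILLISECONDS
        );
    }
}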
executor, new TestListener() { + @Override + public void processed() { + latch.countDown(); + } + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }); latch.await(); } @@ -295,12 +253,12 @@ public void testDuplicateSubmission() throws InterruptedException { SimpleTask task = new SimpleTask(1); TestListener listener = new TestListener() { @Override - public void processed(String source) { + public void processed() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new AssertionError(e); } }; diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java index 9fd8de3679b9c..8872bab724020 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java @@ -70,9 +70,9 @@ public void shutDownThreadExecutor() { } protected interface TestListener { - void onFailure(String source, Exception e); + void onFailure(Exception e); - default void processed(String source) { + default void processed() { // do nothing by default } } @@ -129,7 +129,7 @@ class UpdateTask extends SourcePrioritizedRunnable { public void run() { logger.trace("will process {}", source); testTask.execute(Collections.singletonList(testTask)); - testTask.processed(source); + testTask.processed(); } } @@ -140,7 +140,7 @@ protected void submitTask(String source, TestTask testTask) { if (timeout != null) { threadExecutor.execute(task, timeout, () -> threadPool.generic().execute(() -> { logger.debug("task [{}] timed out after [{}]", task, timeout); - testTask.onFailure(source, new ProcessClusterEventTimeoutException(timeout, source)); + testTask.onFailure(new ProcessClusterEventTimeoutException(timeout, source)); })); } else { threadExecutor.execute(task); @@ -163,7 +163,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -178,7 +178,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { block2.countDown(); } @@ -207,7 +207,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -228,7 +228,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { timedOut.countDown(); } }; @@ -245,7 +245,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -312,7 +312,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) {} + public void onFailure(Exception e) {} @Override public Priority priority() { @@ -349,7 +349,7 @@ public Priority priority() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { latch.countDown(); } } diff --git a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java index 2876220483e2c..5c06e5ed5ebee 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java +++ 
b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java
@@ -20,12 +20,14 @@ public class ZeroBytesReference extends AbstractBytesReference {
     private final int length;

     public ZeroBytesReference(int length) {
+        assert 0 <= length : length;
         this.length = length;
     }

     @Override
     public int indexOf(byte marker, int from) {
-        if (marker == 0) {
+        assert 0 <= from && from <= length : from + " vs " + length;
+        if (marker == 0 && from < length) {
             return from;
         } else {
             return -1;
@@ -34,6 +36,7 @@ public int indexOf(byte marker, int from) {
     @Override
     public byte get(int index) {
+        assert 0 <= index && index < length : index + " vs " + length;
         return 0;
     }

@@ -44,6 +47,7 @@ public int length() {
     @Override
     public BytesReference slice(int from, int length) {
+        assert from + length <= this.length : from + " and " + length + " vs " + this.length;
         return new ZeroBytesReference(length);
     }

diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java
index aa67871541e83..c9765880d24cb 100644
--- a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java
+++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java
@@ -198,6 +198,11 @@ public void testPolygonOrientation() throws IOException, ParseException {
             expected("POLYGON ((180 29, 180 38, 180 56, 180 53, 178 47, 177 23, 180 29))"),
             actual("POLYGON ((180 38, 180.0 56, 180.0 53, 178 47, 177 23, 180 29, 180 36, 180 37, 180 38))", randomBoolean())
         );
+
+        assertEquals(
+            expected("POLYGON ((-135 85, 135 85, 45 85, -45 85, -135 85))"),
+            actual("POLYGON ((-45 85, -135 85, 135 85, 45 85, -45 85))", randomBoolean())
+        );
     }

     public void testInvalidSelfCrossingPolygon() {
@@ -218,6 +223,15 @@ public void testCrossingDateline() {
         assertTrue(geometry instanceof MultiPolygon);
     }

+    public void testPolygonAllCollinearPoints() {
+        Polygon polygon = new Polygon(new LinearRing(new double[] { 0, 1, -1, 0 }, new double[] { 0, 1, -1, 0 }));
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexer.indexShape(polygon));
+        assertEquals(
+            "Unable to Tessellate shape [[1.0, 1.0] [-1.0, -1.0] [0.0, 0.0] [1.0, 1.0] ]. Possible malformed shape detected.",
+            e.getMessage()
+        );
+    }
+
     private XContentBuilder polygon(Boolean orientation, double...
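// Editor's note on the ZeroBytesReference indexOf change above: when the search starts at
// from == length the scanned range is empty, so even an all-zero reference must report "not
// found" instead of returning from. A compact restatement of that rule:
class ZeroIndexOfSketch {
    static int indexOfZero(int length, int from) {
        assert 0 <= from && from <= length : from + " vs " + length;
        // every byte is zero, so the first match is 'from' itself, unless the range is empty
        return from < length ? from : -1;
    }
}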
val) throws IOException { XContentBuilder pointGeoJson = XContentFactory.jsonBuilder().startObject(); { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java index e8080e92de6aa..2a9d8f30dee0f 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java @@ -375,6 +375,21 @@ public void testPolygon() { polygon = new Polygon(new LinearRing(new double[] { 170, 190, 190, 170, 170 }, new double[] { -10, -10, 10, 10, -10 })); assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon)); assertEquals(true, GeometryNormalizer.needsNormalize(Orientation.CCW, polygon)); + + polygon = new Polygon( + new LinearRing( + new double[] { -107.88180702965093, -107.88179936541891, -107.88180701456989, -107.88180702965093 }, + new double[] { 37.289285907909985, 37.289278246132682, 37.289285918063491, 37.289285907909985 } + ) + ); + indexed = new Polygon( + new LinearRing( + new double[] { -107.88179936541891, -107.88180701456989, -107.88180702965093, -107.88179936541891 }, + new double[] { 37.289278246132682, 37.289285918063491, 37.289285907909985, 37.289278246132682 } + ) + ); + assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon)); + } public void testMultiPolygon() { diff --git a/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java index 18af051fa70f1..8ab7d03fec62f 100644 --- a/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java @@ -27,6 +27,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.eagerPartition; +import static org.elasticsearch.common.util.CollectionUtils.limitSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -177,4 +178,14 @@ public void testEnsureNoSelfReferences() { } } + + public void testLimitSizeOfShortList() { + var shortList = randomList(0, 10, () -> "item"); + assertThat(limitSize(shortList, 10), equalTo(shortList)); + } + + public void testLimitSizeOfLongList() { + var longList = randomList(10, 100, () -> "item"); + assertThat(limitSize(longList, 10), equalTo(longList.subList(0, 10))); + } } diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java index aa7ae1804cc2a..1c256f00e2dc1 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -480,9 +480,11 @@ public void testDataOnlyNodePersistence() throws Exception { // generate a series of updates and check if batching works final String indexName = randomAlphaOfLength(10); long currentTerm = state.term(); + boolean wroteState = false; final int iterations = randomIntBetween(1, 1000); for (int i = 0; i < iterations; i++) { - if (rarely()) { + final boolean mustWriteState = wroteState == false && i == iterations - 1; + if (rarely() && mustWriteState == false) { // bump term currentTerm = currentTerm + (rarely() ? 
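// Editor's sketch of the CollectionUtils.limitSize contract the two new tests above pin down,
// assuming the obvious semantics: the list itself when it is already short enough, otherwise
// its first n elements.
import java.util.List;

class LimitSizeSketch {
    static <T> List<T> limitSize(List<T> list, int n) {
        return list.size() <= n ? list : list.subList(0, n);
    }
}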
randomIntBetween(1, 5) : 0L); persistedState.setCurrentTerm(currentTerm); @@ -496,8 +498,10 @@ public void testDataOnlyNodePersistence() throws Exception { Metadata.builder().coordinationMetadata(createCoordinationMetadata(term)).put(indexMetadata, false).build() ); persistedState.setLastAcceptedState(state); + wroteState = true; } } + assertTrue(wroteState); // must write it at least once assertEquals(currentTerm, persistedState.getCurrentTerm()); assertClusterStateEqual(state, persistedState.getLastAcceptedState()); assertBusy(() -> assertTrue(gateway.allPendingAsyncStatesWritten())); diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index d386e080606a7..758da73def503 100644 --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -684,7 +684,6 @@ TestAllocator addData( data.put( node, new TransportNodesListShardStoreMetadata.StoreFilesMetadata( - shardId, new Store.MetadataSnapshot(unmodifiableMap(filesAsMap), unmodifiableMap(commitData), randomInt()), peerRecoveryRetentionLeases ) diff --git a/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java b/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java deleted file mode 100644 index 3f6f271530ada..0000000000000 --- a/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.health; - -import org.elasticsearch.test.ESTestCase; - -public class HealthComponentResultTests extends ESTestCase { - - public void testGroupIndicators() { - - // var indicator1 = new HealthIndicatorResult("indicator1", "component1", GREEN, null, null); - // var indicator2 = new HealthIndicatorResult("indicator2", "component1", YELLOW, null, null); - // var indicator3 = new HealthIndicatorResult("indicator3", "component2", GREEN, null, null); - // - // var components = HealthComponentResult.createComponentsFromIndicators(List.of(indicator1, indicator2, indicator3)); - // - // assertThat( - // components, - // anyOf( - // hasItems( - // new HealthComponentResult("component1", YELLOW, List.of(indicator2, indicator1)), - // new HealthComponentResult("component2", GREEN, List.of(indicator3)) - // ), - // hasItems( - // new HealthComponentResult("component1", YELLOW, List.of(indicator1, indicator2)), - // new HealthComponentResult("component2", GREEN, List.of(indicator3)) - // ) - // ) - // ); - } -} diff --git a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java new file mode 100644 index 0000000000000..3edb276d0fe77 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
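// Editor's sketch of the randomized-loop fix in GatewayMetaStatePersistedStateTests above: the
// final iteration is forced onto the state-writing branch unless an earlier iteration already
// wrote, so the assertions after the loop always have a written state to compare against.
import java.util.Random;

class MustWriteSketch {
    static boolean run(int iterations, Random random) {
        boolean wroteState = false;
        for (int i = 0; i < iterations; i++) {
            final boolean mustWriteState = wroteState == false && i == iterations - 1;
            if (random.nextInt(10) == 0 && mustWriteState == false) {
                // rarely: do something other than writing state, e.g. bump the term
            } else {
                // write the cluster state
                wroteState = true;
            }
        }
        return wroteState; // callers assert this is true
    }
}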
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.hasItems; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HealthServiceTests extends ESTestCase { + + public void testShouldReturnGroupedIndicators() { + + var indicator1 = new HealthIndicatorResult("indicator1", "component1", GREEN, null, null); + var indicator2 = new HealthIndicatorResult("indicator2", "component1", YELLOW, null, null); + var indicator3 = new HealthIndicatorResult("indicator3", "component2", GREEN, null, null); + + var service = new HealthService( + List.of( + createMockHealthIndicatorService(indicator1), + createMockHealthIndicatorService(indicator2), + createMockHealthIndicatorService(indicator3) + ) + ); + + assertThat( + service.getHealth(), + anyOf( + hasItems( + new HealthComponentResult("component1", YELLOW, List.of(indicator2, indicator1)), + new HealthComponentResult("component2", GREEN, List.of(indicator3)) + ), + hasItems( + new HealthComponentResult("component1", YELLOW, List.of(indicator1, indicator2)), + new HealthComponentResult("component2", GREEN, List.of(indicator3)) + ) + ) + ); + } + + private static HealthIndicatorService createMockHealthIndicatorService(HealthIndicatorResult result) { + var healthIndicatorService = mock(HealthIndicatorService.class); + when(healthIndicatorService.calculate()).thenReturn(result); + return healthIndicatorService; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java index 8184ce2dc97d8..2b6f29d2e79f1 100644 --- a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java @@ -144,8 +144,22 @@ public void testValidateAliasWithSearchRouting() { assertThat(e.getMessage(), equalTo("routing is forbidden on CRUD operations that target indices in [index.mode=time_series]")); } - public void testRoutingPathMatchesObject() { - Settings s = getSettings(randomBoolean() ? 
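// Editor's sketch of the grouping behaviour HealthServiceTests expects: indicators are grouped
// by component name and a component reports the worst status among its indicators, so
// component1, holding one GREEN and one YELLOW indicator, comes out YELLOW. The enum order
// below is an assumption made for this sketch, not necessarily HealthStatus's real ordering.
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class HealthGroupingSketch {
    enum Status { GREEN, YELLOW, RED }

    record Indicator(String name, String component, Status status) {}

    record Component(String name, Status status, List<Indicator> indicators) {}

    static List<Component> group(List<Indicator> indicators) {
        Map<String, List<Indicator>> byComponent = new LinkedHashMap<>();
        for (Indicator indicator : indicators) {
            byComponent.computeIfAbsent(indicator.component(), k -> new ArrayList<>()).add(indicator);
        }
        List<Component> components = new ArrayList<>();
        for (Map.Entry<String, List<Indicator>> entry : byComponent.entrySet()) {
            Status worst = entry.getValue().stream()
                .map(Indicator::status)
                .max(Comparator.naturalOrder()) // RED > YELLOW > GREEN with this enum order
                .orElseThrow();
            components.add(new Component(entry.getKey(), worst, entry.getValue()));
        }
        return components;
    }
}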
"dim.o" : "dim.*"); + public void testRoutingPathMatchesObject() throws IOException { + Settings s = getSettings("dim.o*"); + createMapperService(s, mapping(b -> { + b.startObject("dim").startObject("properties"); + { + b.startObject("o").startObject("properties"); + b.startObject("inner_dim").field("type", "keyword").field("time_series_dimension", true).endObject(); + b.endObject().endObject(); + } + b.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject(); + b.endObject().endObject(); + })); + } + + public void testRoutingPathEqualsObjectNameError() { + Settings s = getSettings("dim.o"); Exception e = expectThrows(IllegalArgumentException.class, () -> createMapperService(s, mapping(b -> { b.startObject("dim").startObject("properties"); { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 6ff78b0900238..ad47dee581a1a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.BooleanScriptFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.SearchExecutionContext; @@ -319,7 +320,13 @@ public void testDualingQueries() throws IOException { String source = "{\"foo\": " + values + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.EMPTY, null, null, null, sourceToParse) { + DocumentParserContext ctx = new TestDocumentParserContext( + MappingLookup.EMPTY, + MapperTestCase.createIndexSettings(Version.CURRENT, Settings.EMPTY), + null, + null, + sourceToParse + ) { @Override public XContentParser parser() { return parser; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 46426c45aeab7..6e4d30a3cb8a8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -38,6 +39,9 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -45,7 +49,11 @@ import 
org.hamcrest.core.CombinableMatcher; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -135,7 +143,6 @@ public void testPostingsFormat() throws IOException { } public void testDefaultConfiguration() throws IOException { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -158,7 +165,6 @@ public void testDefaultConfiguration() throws IOException { } public void testCompletionAnalyzerSettings() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.field("analyzer", "simple"); @@ -192,7 +198,6 @@ public void testCompletionAnalyzerSettings() throws Exception { @SuppressWarnings("unchecked") public void testTypeParsing() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.field("analyzer", "simple"); @@ -218,7 +223,6 @@ public void testTypeParsing() throws Exception { } public void testParsingMinimal() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -228,7 +232,6 @@ public void testParsingMinimal() throws Exception { } public void testParsingFailure() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); MapperParsingException e = expectThrows( @@ -239,7 +242,6 @@ public void testParsingFailure() throws Exception { } public void testKeywordWithSubCompletionAndContext() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "keyword"); b.startObject("fields"); @@ -284,7 +286,6 @@ public void testKeywordWithSubCompletionAndContext() throws Exception { } public void testCompletionWithContextAndSubCompletion() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startArray("contexts"); @@ -366,8 +367,7 @@ public void testCompletionWithContextAndSubCompletion() throws Exception { } } - public void testKeywordWithSubCompletionAndStringInsert() throws Exception { - + public void testGeoHashWithSubCompletionAndStringInsert() throws Exception { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "geo_point"); b.startObject("fields"); @@ -386,13 +386,19 @@ public void testKeywordWithSubCompletionAndStringInsert() throws Exception { // unable to assert about geofield content, covered in a REST test } - public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exception { + public void testCompletionTypeWithSubfieldsAndStringInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subsuggest1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subsuggest2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); 
b.startObject("fields"); - { - b.startObject("subsuggest").field("type", "completion").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); @@ -401,42 +407,80 @@ public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exc LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion"))); - assertThat(indexableFields.getFields("field.subsuggest"), arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat(indexableFields.getFields("field.subsuggest1"), arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat(indexableFields.getFields("field.subsuggest2"), arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat( + indexableFields.getFields("field.analyzed1"), + arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + ); + assertThat( + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + ); } - public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exception { + public void testCompletionTypeWithSubfieldsAndArrayInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startObject("fields"); - { - b.startObject("analyzed").field("type", "completion").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); - ParsedDocument parsedDocument = defaultMapper.parse(source(b -> { - b.startObject("field"); - { - b.array("input", "New York", "NY"); - b.field("weight", 34); - } - b.endObject(); - })); + ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.array("field", "New York", "NY"))); LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); - assertThat(indexableFields.getFields("field.analyzed"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); - // unable to assert about weight, covered in a REST test + assertThat( + indexableFields.getFields("field.subcompletion1"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.subcompletion2"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.analyzed1"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) + ); + assertThat( + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) + ); } - public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Exception { + public void 
testCompletionTypeWithSubfieldsAndObjectInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startObject("fields"); - { - b.startObject("analyzed").field("type", "keyword").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); @@ -453,7 +497,15 @@ public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Except LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); assertThat( - indexableFields.getFields("field.analyzed"), + indexableFields.getFields("field.subcompletion1"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.subcompletion2"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.analyzed1"), arrayContainingInAnyOrder( keywordField("New York"), sortedSetDocValuesField("New York"), @@ -461,32 +513,19 @@ public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Except sortedSetDocValuesField("NY") ) ); - // unable to assert about weight, covered in a REST test - } - - public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Exception { - - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { - b.field("type", "completion"); - b.startObject("fields"); - { - b.startObject("analyzed").field("type", "keyword").endObject(); - } - b.endObject(); - })); - - ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "suggestion"))); - - LuceneDocument indexableFields = parsedDocument.rootDoc(); - assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion"))); assertThat( - indexableFields.getFields("field.analyzed"), - arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) ); + // unable to assert about weight, covered in a REST test } public void testParsingMultiValued() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -497,7 +536,6 @@ public void testParsingMultiValued() throws Exception { } public void testParsingWithWeight() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -515,7 +553,6 @@ public void testParsingWithWeight() throws Exception { } public void testParsingMultiValueWithWeight() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = 
defaultMapper.mappers().getMapper("field"); @@ -536,7 +573,6 @@ public void testParsingMultiValueWithWeight() throws Exception { } public void testParsingWithGeoFieldAlias() throws Exception { - MapperService mapperService = createMapperService(mapping(b -> { b.startObject("completion"); { @@ -574,7 +610,6 @@ public void testParsingWithGeoFieldAlias() throws Exception { } public void testParsingFull() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -596,7 +631,6 @@ public void testParsingFull() throws Exception { } public void testParsingMixed() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -640,7 +674,6 @@ public void testParsingMixed() throws Exception { } public void testNonContextEnabledParsingWithContexts() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(source(b -> { b.startObject("field"); @@ -656,7 +689,6 @@ public void testNonContextEnabledParsingWithContexts() throws Exception { } public void testFieldValueValidation() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); CharsRefBuilder charsRefBuilder = new CharsRefBuilder(); charsRefBuilder.append("sugg"); @@ -790,6 +822,96 @@ public void testLimitOfContextMappings() throws Throwable { ); } + private static CompletionFieldMapper.CompletionInputMetadata randomCompletionMetadata() { + Map> contexts = randomBoolean() + ? Collections.emptyMap() + : Collections.singletonMap("filter", Collections.singleton("value")); + return new CompletionFieldMapper.CompletionInputMetadata("text", contexts, 10); + } + + private static XContentParser documentParser(CompletionFieldMapper.CompletionInputMetadata metadata) throws IOException { + XContentBuilder docBuilder = JsonXContent.contentBuilder(); + if (randomBoolean()) { + docBuilder.prettyPrint(); + } + docBuilder.startObject(); + docBuilder.field("field"); + docBuilder.map(metadata.toMap()); + docBuilder.endObject(); + String document = Strings.toString(docBuilder); + XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document); + docParser.nextToken(); + docParser.nextToken(); + assertEquals(XContentParser.Token.START_OBJECT, docParser.nextToken()); + return docParser; + } + + public void testMultiFieldParserSimpleValue() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser = documentParser(metadata); + XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + // we don't check currentToken here because it returns START_OBJECT that is inconsistent with returning a value + assertEquals("text", multiFieldParser.textOrNull()); + assertEquals(documentParser.getTokenLocation(), multiFieldParser.getTokenLocation()); + assertEquals(documentParser.currentName(), multiFieldParser.currentName()); + } + + public void testMultiFieldParserCompletionSubfield() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser 
= documentParser(metadata); + // compare the object structure with the original metadata, this implicitly verifies that the xcontent read is valid + XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder() + .copyCurrentStructure( + new CompletionFieldMapper.MultiFieldParser(metadata, documentParser.currentName(), documentParser.getTokenLocation()) + ); + XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap()); + String jsonMetadata = Strings.toString(metadataBuilder); + assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder)); + // advance token by token and verify currentName as well as getTokenLocation + XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + XContentParser expectedParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, jsonMetadata); + assertEquals(expectedParser.nextToken(), multiFieldParser.currentToken()); + XContentLocation expectedTokenLocation = documentParser.getTokenLocation(); + while (expectedParser.nextToken() != null) { + XContentParser.Token token = multiFieldParser.nextToken(); + assertEquals(expectedParser.currentToken(), token); + assertEquals(expectedParser.currentToken(), multiFieldParser.currentToken()); + assertEquals(expectedTokenLocation, multiFieldParser.getTokenLocation()); + assertEquals(documentParser.nextToken(), multiFieldParser.currentToken()); + assertEquals(documentParser.currentName(), multiFieldParser.currentName()); + } + assertNull(multiFieldParser.nextToken()); + } + + public void testMultiFieldParserMixedSubfields() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser = documentParser(metadata); + // simulate 10 sub-fields which may either read simple values or the full object structure + for (int i = 0; i < 10; i++) { + XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + if (randomBoolean()) { + assertEquals("text", multiFieldParser.textOrNull()); + } else { + XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder().copyCurrentStructure(multiFieldParser); + XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap()); + String jsonMetadata = Strings.toString(metadataBuilder); + assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder)); + } + } + } + private Matcher suggestField(String value) { return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)), Matchers.instanceOf(SuggestField.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index 922e20b410873..420ef2a8fab44 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -8,9 +8,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
+import org.elasticsearch.xcontent.json.JsonXContent;
 import org.hamcrest.CoreMatchers;

 import java.io.IOException;
@@ -235,6 +239,53 @@ public void testMultiFieldWithMultipleValues() throws Exception {
         assertThat(doc.getFields("field.geohash")[1].binaryValue().utf8ToString(), equalTo("s0fu7n0xng81"));
     }

+    public void testKeywordWithGeopointSubfield() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
+            b.field("type", "keyword").field("doc_values", false);
+            b.startObject("fields");
+            {
+                b.startObject("geopoint").field("type", "geo_point").field("doc_values", false).endObject();
+            }
+            b.endObject();
+        }));
+        LuceneDocument doc = mapper.parse(source(b -> b.array("field", "s093jd0k72s1"))).rootDoc();
+        assertThat(doc.getFields("field"), arrayWithSize(1));
+        assertEquals("s093jd0k72s1", doc.getFields("field")[0].binaryValue().utf8ToString());
+        assertThat(doc.getFields("field.geopoint"), arrayWithSize(1));
+        assertThat(doc.getField("field.geopoint"), hasToString(both(containsString("field.geopoint:2.999")).and(containsString("1.999"))));
+    }
+
+    private static XContentParser documentParser(String value, boolean prettyPrint) throws IOException {
+        XContentBuilder docBuilder = JsonXContent.contentBuilder();
+        if (prettyPrint) {
+            docBuilder.prettyPrint();
+        }
+        docBuilder.startObject();
+        docBuilder.field("field", value);
+        docBuilder.endObject();
+        String document = Strings.toString(docBuilder);
+        XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document);
+        docParser.nextToken();
+        docParser.nextToken();
+        assertEquals(XContentParser.Token.VALUE_STRING, docParser.nextToken());
+        return docParser;
+    }
+
+    public void testGeoHashMultiFieldParser() throws IOException {
+        boolean prettyPrint = randomBoolean();
+        XContentParser docParser = documentParser("POINT (2 3)", prettyPrint);
+        XContentParser expectedParser = documentParser("s093jd0k72s1", prettyPrint);
+        XContentParser parser = new GeoPointFieldMapper.GeoHashMultiFieldParser(docParser, "s093jd0k72s1");
+        for (int i = 0; i < 10; i++) {
+            assertEquals(expectedParser.currentToken(), parser.currentToken());
+            assertEquals(expectedParser.currentName(), parser.currentName());
+            assertEquals(expectedParser.getTokenLocation(), parser.getTokenLocation());
+            assertEquals(expectedParser.textOrNull(), parser.textOrNull());
+            expectThrows(UnsupportedOperationException.class, parser::nextToken);
+        }
+    }
+
     public void testNullValue() throws Exception {
         DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point")));
         Mapper fieldMapper = mapper.mappers().getMapper("field");
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
index 96f51e44c3657..9286daa30d37d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
@@ -368,17 +368,18 @@ public void testDimensionIndexedAndDocvalues() {
     }

     public void testDimensionMultiValuedField() throws IOException {
-        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
+        XContentBuilder mapping = fieldMapping(b -> {
             minimalMapping(b);
             b.field("time_series_dimension", true);
-        }));
+        });
+        DocumentMapper mapper = randomBoolean() ? createDocumentMapper(mapping) : createTimeSeriesModeDocumentMapper(mapping);

         Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> b.array("field", "1234", "45678"))));
         assertThat(e.getCause().getMessage(), containsString("Dimension field [field] cannot be a multi-valued field"));
     }

     public void testDimensionExtraLongKeyword() throws IOException {
-        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
+        DocumentMapper mapper = createTimeSeriesModeDocumentMapper(fieldMapping(b -> {
             minimalMapping(b);
             b.field("time_series_dimension", true);
         }));
@@ -604,4 +605,17 @@ public void testDimensionInRoutingPath() throws IOException {
         );
         mapper.documentMapper().validate(settings, false); // Doesn't throw
     }
+
+    public void testKeywordFieldUtf8LongerThan32766() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword")));
+        StringBuilder stringBuilder = new StringBuilder(32768);
+        for (int i = 0; i < 32768; i++) {
+            stringBuilder.append("a");
+        }
+        MapperParsingException e = expectThrows(
+            MapperParsingException.class,
+            () -> mapper.parse(source(b -> b.field("field", stringBuilder.toString())))
+        );
+        assertThat(e.getCause().getMessage(), containsString("UTF8 encoding is longer than the max length"));
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
index 5355ecb676323..2d5d12dd599b0 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
@@ -25,6 +25,7 @@
 import java.io.UncheckedIOException;
 import java.util.Collection;
 import java.util.HashSet;
+import java.util.List;
 import java.util.function.Function;

 import static org.hamcrest.Matchers.containsString;
@@ -1380,4 +1381,52 @@ public void testFieldNamesIncludeInRoot() throws Exception {
         assertThat(doc.docs().get(4).get("_field_names"), nullValue());
     }

+    public void testNoDimensionNestedFields() {
+        {
+            Exception e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+                b.startObject("nested");
+                {
+                    b.field("type", "nested");
+                    b.startObject("properties");
+                    {
+                        b.startObject("foo")
+                            .field("type", randomFrom(List.of("keyword", "ip", "long", "short", "integer", "byte")))
+                            .field("time_series_dimension", true)
+                            .endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            })));
+            assertThat(e.getMessage(), containsString("time_series_dimension can't be configured in nested field [nested.foo]"));
+        }
+
+        {
+            Exception e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+                b.startObject("nested");
+                {
+                    b.field("type", "nested");
+                    b.startObject("properties");
+                    {
+                        b.startObject("other").field("type", "keyword").endObject();
+                        b.startObject("object").field("type", "object");
+                        {
+                            b.startObject("properties");
+                            {
+                                b.startObject("foo")
+                                    .field("type", randomFrom(List.of("keyword", "ip", "long", "short", "integer", "byte")))
+                                    .field("time_series_dimension", true)
+                                    .endObject();
+                            }
+                            b.endObject();
+                        }
+                        b.endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            })));
+            assertThat(e.getMessage(), containsString("time_series_dimension can't be configured in nested field [nested.object.foo]"));
+        }
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java
index c8849fb9c7415..633ba8ef30efe 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java
@@ -17,6 +17,8 @@
 import org.elasticsearch.xcontent.XContentBuilder;

 import java.io.IOException;
+import java.util.List;
+import java.util.Map;

 import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
@@ -123,6 +125,23 @@ public void testStrings() throws IOException {
         );
     }

+    public void testUnicodeKeys() throws IOException {
+        String fire = new String(new int[] { 0x1F525 }, 0, 1);
+        String coffee = "\u2615";
+        DocumentMapper docMapper = createDocumentMapper("a", mapping(b -> {
+            b.startObject(fire).field("type", "keyword").field("time_series_dimension", true).endObject();
+            b.startObject(coffee).field("type", "keyword").field("time_series_dimension", true).endObject();
+        }));
+
+        ParsedDocument doc = parseDocument(docMapper, b -> b.field(fire, "hot").field(coffee, "good"));
+        Map<String, Object> tsid = TimeSeriesIdFieldMapper.decodeTsid(
+            new ByteArrayStreamInput(doc.rootDoc().getBinaryValue("_tsid").bytes)
+        );
+        assertMap(tsid, matchesMap().entry(coffee, "good").entry(fire, "hot"));
+        // Also make sure the keys are in order
+        assertThat(List.copyOf(tsid.keySet()), equalTo(List.of(coffee, fire)));
+    }
+
     public void testKeywordTooLong() throws IOException {
         DocumentMapper docMapper = createDocumentMapper(
             "a",
diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
index b1819af573a4f..be96503a382b6 100644
--- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
@@ -240,7 +240,11 @@ public void testIllegalValues() {
         }
     }

-    public void testSimpleMatchQuery() throws IOException {
+    public void testParseDefaultsRemoved() throws IOException {
+        /*
+         * This json includes many defaults. When we parse the query and then
+         * call toString on it, all of the defaults are removed.
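+         * The expected json asserted below therefore only contains the non-default settings.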
+         */
         String json = """
             {
               "match" : {
@@ -258,12 +262,33 @@
               }
             }""";
         MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json);
-        checkGeneratedJson(json, qb);
+        checkGeneratedJson("""
+            {
+              "match": {
+                "message": {
+                  "query": "to be or not to be",
+                  "operator": "AND",
+                  "zero_terms_query": "ALL"
+                }
+              }
+            }""", qb);

         assertEquals(json, "to be or not to be", qb.value());
         assertEquals(json, Operator.AND, qb.operator());
     }

+    public void testToXContentWithDefaults() throws IOException {
+        QueryBuilder query = new MatchQueryBuilder("foo", "bar");
+        checkGeneratedJson("""
+            {
+              "match": {
+                "foo": {
+                  "query": "bar"
+                }
+              }
+            }""", query);
+    }
+
     public void testFuzzinessOnNonStringField() throws Exception {
         MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42);
         query.fuzziness(randomFuzziness(INT_FIELD_NAME));
diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
index a9f121e9dc349..56a0845a6d7a3 100644
--- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
@@ -139,7 +139,11 @@ public void testValidate() {
         assertThat(e.getMessage(), equalTo("[nested] requires 'score_mode' field"));
     }

-    public void testFromJson() throws IOException {
+    public void testParseDefaultsRemoved() throws IOException {
+        /*
+         * This json includes many defaults. When we parse the query and then
+         * call toString on it, all of the defaults are removed.
+         */
         String json = """
             {
               "nested" : {
@@ -178,7 +182,34 @@
             }""";

         NestedQueryBuilder parsed = (NestedQueryBuilder) parseQuery(json);
-        checkGeneratedJson(json, parsed);
+        checkGeneratedJson("""
+            {
+              "nested" : {
+                "query" : {
+                  "bool" : {
+                    "must" : [ {
+                      "match" : {
+                        "obj1.name" : {
+                          "query" : "blue"
+                        }
+                      }
+                    }, {
+                      "range" : {
+                        "obj1.count" : {
+                          "gt" : 5,
+                          "boost" : 1.0
+                        }
+                      }
+                    } ],
+                    "boost" : 1.0
+                  }
+                },
+                "path" : "obj1",
+                "ignore_unmapped" : false,
+                "score_mode" : "avg",
+                "boost" : 1.0
+              }
+            }""", parsed);

         assertEquals(json, ScoreMode.Avg, parsed.scoreMode());
     }
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 7835f7014818d..640da6b5799ab 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -1279,7 +1279,7 @@ public void testClosedIndicesSkipSyncGlobalCheckpoint() throws Exception {

     public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, InterruptedException {
         final IndexShard indexShard = newStartedShard(false);
-        final int operations = 1024 - scaledRandomIntBetween(0, 1024);
+        final int operations = randomBoolean() ? scaledRandomIntBetween(0, 1024) : 1024 - scaledRandomIntBetween(0, 1024);
         indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED));

         final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo();
@@ -1287,6 +1287,7 @@ public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException,
         indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test");

         final long globalCheckpoint = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint());
+        final long maxSeqNoOfUpdatesOrDeletes = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo);
         final long maxSeqNoOfUpdatesOrDeletesBeforeRollback = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
         final Set<String> docsBeforeRollback = getShardDocUIDs(indexShard);
         final CountDownLatch latch = new CountDownLatch(1);
@@ -1294,8 +1295,8 @@ public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException,
             indexShard,
             indexShard.getPendingPrimaryTerm() + 1,
             globalCheckpoint,
-            randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo),
-            new ActionListener<Releasable>() {
+            maxSeqNoOfUpdatesOrDeletes,
+            new ActionListener<>() {
                 @Override
                 public void onResponse(Releasable releasable) {
                     releasable.close();
@@ -1312,11 +1313,10 @@ public void onFailure(Exception e) {
         latch.await();

         long globalCheckpointOnPromotedReplica = Math.max(globalCheckpointOnReplica, globalCheckpoint);
-        if (globalCheckpointOnPromotedReplica < maxSeqNo) {
-            assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNo));
-        } else {
-            assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNoOfUpdatesOrDeletesBeforeRollback));
-        }
+        long expectedMaxSeqNoOfUpdatesOrDeletes = globalCheckpointOnPromotedReplica < maxSeqNo
+            ? maxSeqNo
+            : Math.max(maxSeqNoOfUpdatesOrDeletesBeforeRollback, maxSeqNoOfUpdatesOrDeletes);
+        assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(expectedMaxSeqNoOfUpdatesOrDeletes));
         final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary();
         final CountDownLatch resyncLatch = new CountDownLatch(1);
         indexShard.updateShardState(
@@ -1331,11 +1331,7 @@ public void onFailure(Exception e) {
         assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo));
         assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo));
         assertThat(getShardDocUIDs(indexShard), equalTo(docsBeforeRollback));
-        if (globalCheckpointOnPromotedReplica < maxSeqNo) {
-            assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNo));
-        } else {
-            assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNoOfUpdatesOrDeletesBeforeRollback));
-        }
+        assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(expectedMaxSeqNoOfUpdatesOrDeletes));

         closeShard(indexShard, false);
     }
diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java
index f5a5d6ed4c009..efc1b846492b8 100644
--- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java
+++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java
@@ -80,7 +80,10 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.LongUnaryOperator;

+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.test.VersionUtils.randomCompatibleVersion;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsString;
@@ -93,6 +96,7 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.sameInstance;

 public class StoreTests extends ESTestCase {

@@ -918,7 +922,7 @@ public void testMetadataSnapshotStreaming() throws Exception {
         ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
         InputStreamStreamInput in = new InputStreamStreamInput(inBuffer);
         in.setVersion(targetNodeVersion);
-        Store.MetadataSnapshot inMetadataSnapshot = new Store.MetadataSnapshot(in);
+        Store.MetadataSnapshot inMetadataSnapshot = Store.MetadataSnapshot.readFrom(in);
         Map<String, StoreFileMetadata> origEntries = new HashMap<>();
         origEntries.putAll(outMetadataSnapshot.asMap());
         for (Map.Entry<String, StoreFileMetadata> entry : inMetadataSnapshot.asMap().entrySet()) {
@@ -928,6 +932,21 @@ public void testMetadataSnapshotStreaming() throws Exception {
         assertThat(inMetadataSnapshot.getCommitUserData(), equalTo(outMetadataSnapshot.getCommitUserData()));
     }

+    public void testEmptyMetadataSnapshotStreaming() throws Exception {
+        var outMetadataSnapshot = randomBoolean() ? Store.MetadataSnapshot.EMPTY : new Store.MetadataSnapshot(emptyMap(), emptyMap(), 0L);
+        var targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT);
+
+        var outBuffer = new ByteArrayOutputStream();
+        var out = new OutputStreamStreamOutput(outBuffer);
+        out.setVersion(targetNodeVersion);
+        outMetadataSnapshot.writeTo(out);
+
+        var inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
+        var in = new InputStreamStreamInput(inBuffer);
+        in.setVersion(targetNodeVersion);
+        assertThat(Store.MetadataSnapshot.readFrom(in), sameInstance(Store.MetadataSnapshot.EMPTY));
+    }
+
     protected Store.MetadataSnapshot createMetadataSnapshot() {
         StoreFileMetadata storeFileMetadata1 = new StoreFileMetadata("segments", 1, "666", MIN_SUPPORTED_LUCENE_VERSION.toString());
         StoreFileMetadata storeFileMetadata2 = new StoreFileMetadata("no_segments", 1, "666", MIN_SUPPORTED_LUCENE_VERSION.toString());
@@ -978,21 +997,16 @@ public void testStreamStoreFilesMetadata() throws Exception {
             );
         }
         TransportNodesListShardStoreMetadata.StoreFilesMetadata outStoreFileMetadata =
-            new TransportNodesListShardStoreMetadata.StoreFilesMetadata(
-                new ShardId("test", "_na_", 0),
-                metadataSnapshot,
-                peerRecoveryRetentionLeases
-            );
+            new TransportNodesListShardStoreMetadata.StoreFilesMetadata(metadataSnapshot, peerRecoveryRetentionLeases);
         ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
         OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
-        org.elasticsearch.Version targetNodeVersion = randomVersion(random());
+        org.elasticsearch.Version targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT);
         out.setVersion(targetNodeVersion);
         outStoreFileMetadata.writeTo(out);

         ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
         InputStreamStreamInput in = new InputStreamStreamInput(inBuffer);
         in.setVersion(targetNodeVersion);
-        TransportNodesListShardStoreMetadata.StoreFilesMetadata inStoreFileMetadata =
-            new TransportNodesListShardStoreMetadata.StoreFilesMetadata(in);
+        var inStoreFileMetadata = TransportNodesListShardStoreMetadata.StoreFilesMetadata.readFrom(in);
         Iterator<StoreFileMetadata> outFiles = outStoreFileMetadata.iterator();
         for (StoreFileMetadata inFile : inStoreFileMetadata) {
             assertThat(inFile.name(), equalTo(outFiles.next().name()));
@@ -1001,6 +1015,25 @@ public void testStreamStoreFilesMetadata() throws Exception {
         assertThat(outStoreFileMetadata.peerRecoveryRetentionLeases(), equalTo(peerRecoveryRetentionLeases));
     }

+    public void testStreamEmptyStoreFilesMetadata() throws Exception {
+        var outStoreFileMetadata = randomBoolean()
+            ? TransportNodesListShardStoreMetadata.StoreFilesMetadata.EMPTY
+            : new TransportNodesListShardStoreMetadata.StoreFilesMetadata(Store.MetadataSnapshot.EMPTY, emptyList());
+        var outBuffer = new ByteArrayOutputStream();
+        var out = new OutputStreamStreamOutput(outBuffer);
+        var targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT);
+        out.setVersion(targetNodeVersion);
+        outStoreFileMetadata.writeTo(out);
+
+        var inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
+        var in = new InputStreamStreamInput(inBuffer);
+        in.setVersion(targetNodeVersion);
+        assertThat(
+            TransportNodesListShardStoreMetadata.StoreFilesMetadata.readFrom(in),
+            sameInstance(TransportNodesListShardStoreMetadata.StoreFilesMetadata.EMPTY)
+        );
+    }
+
     public void testMarkCorruptedOnTruncatedSegmentsFile() throws IOException {
         IndexWriterConfig iwc = newIndexWriterConfig();
         final ShardId shardId = new ShardId("index", "_na_", 1);
diff --git a/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java b/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
index 679144155c107..85597aea72666 100644
--- a/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.SystemIndexManager.UpgradeStatus;
@@ -217,6 +218,19 @@ public void testManagerProcessesIndicesWithOutdatedMappings() {
         );
     }

+    /**
+     * Check that the manager will try to upgrade indices where the mappings metadata is null or absent.
+     */
+    public void testManagerProcessesIndicesWithNullMetadata() {
+        SystemIndices systemIndices = new SystemIndices(Map.of("MyIndex", FEATURE));
+        SystemIndexManager manager = new SystemIndexManager(systemIndices, client);
+
+        assertThat(
+            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings(builder -> {})))), DESCRIPTOR),
+            equalTo(UpgradeStatus.NEEDS_MAPPINGS_UPDATE)
+        );
+    }
+
     /**
     * Check that the manager will try to upgrade indices where the version in the metadata is null or absent.
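     * Either case should be reported as NEEDS_MAPPINGS_UPDATE, as asserted below.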
     */
@@ -225,7 +239,7 @@ public void testManagerProcessesIndicesWithNullVersionMetadata() {
         SystemIndexManager manager = new SystemIndexManager(systemIndices, client);

         assertThat(
-            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings(null)))), DESCRIPTOR),
+            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings((String) null)))), DESCRIPTOR),
             equalTo(UpgradeStatus.NEEDS_MAPPINGS_UPDATE)
         );
     }
@@ -385,42 +399,21 @@ private static XContentBuilder getMappings() {
     }

     private static XContentBuilder getMappings(String version) {
-        try {
-            final XContentBuilder builder = jsonBuilder();
-
-            builder.startObject();
-            {
-                builder.startObject("_meta");
-                builder.field("version", version);
-                builder.endObject();
-
-                builder.field("dynamic", "strict");
-                builder.startObject("properties");
-                {
-                    builder.startObject("completed");
-                    builder.field("type", "boolean");
-                    builder.endObject();
-                }
-                builder.endObject();
-            }
-
-            builder.endObject();
-            return builder;
-        } catch (IOException e) {
-            throw new UncheckedIOException("Failed to build " + SYSTEM_INDEX_NAME + " index mappings", e);
-        }
+        return getMappings(builder -> builder.object("_meta", meta -> meta.field("version", version)));
     }

     // Prior to 7.12.0, .tasks had _meta.version: 3 so we need to be sure we can handle that
     private static XContentBuilder getMappings(int version) {
+        return getMappings(builder -> builder.object("_meta", meta -> meta.field("version", version)));
+    }
+
+    private static XContentBuilder getMappings(CheckedConsumer<XContentBuilder, IOException> metaCallback) {
         try {
             final XContentBuilder builder = jsonBuilder();

             builder.startObject();
             {
-                builder.startObject("_meta");
-                builder.field("version", version);
-                builder.endObject();
+                metaCallback.accept(builder);

                 builder.field("dynamic", "strict");
                 builder.startObject("properties");
@@ -438,4 +431,5 @@ private static XContentBuilder getMappings(int version) {
             throw new UncheckedIOException("Failed to build " + SYSTEM_INDEX_NAME + " index mappings", e);
         }
     }
+
 }
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
index d4deb426e9b0c..e5a891e1d1d95 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java
@@ -36,6 +36,7 @@
 import org.elasticsearch.action.support.master.TransportMasterNodeActionUtils;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskConfig;
 import org.elasticsearch.cluster.ClusterStateTaskExecutor;
 import org.elasticsearch.cluster.ClusterStateTaskExecutor.ClusterTasksResult;
 import org.elasticsearch.cluster.ClusterStateTaskExecutor.TaskResult;
@@ -47,6 +48,7 @@
 import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry;
 import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardUpdateTask;
 import org.elasticsearch.cluster.block.ClusterBlock;
+import org.elasticsearch.cluster.coordination.JoinTask;
 import org.elasticsearch.cluster.coordination.JoinTaskExecutor;
 import org.elasticsearch.cluster.coordination.NodeRemovalClusterStateTaskExecutor;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
@@ -93,7 +95,6 @@
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xcontent.NamedXContentRegistry;

-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -112,6 +113,7 @@
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doCallRealMethod;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

@@ -347,39 +349,37 @@ public ClusterState reroute(ClusterState state, ClusterRerouteRequest request) {
         return execute(transportClusterRerouteAction, request, state);
     }

-    public ClusterState addNodes(ClusterState clusterState, List<DiscoveryNode> nodes) {
+    public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNode) {
         return runTasks(
             joinTaskExecutor,
             clusterState,
-            nodes.stream()
-                .map(
-                    node -> new JoinTaskExecutor.Task(
-                        node,
-                        "dummy reason",
-                        ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); })
-                    )
+            List.of(
+                JoinTask.singleNode(
+                    discoveryNode,
+                    "dummy reason",
+                    ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); })
                 )
-                .toList()
+            )
         );
     }

     public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List<DiscoveryNode> nodes) {
-        List<JoinTaskExecutor.Task> joinNodes = new ArrayList<>();
-        joinNodes.add(JoinTaskExecutor.newBecomeMasterTask());
-        joinNodes.add(JoinTaskExecutor.newFinishElectionTask());
-        joinNodes.addAll(
-            nodes.stream()
-                .map(
-                    node -> new JoinTaskExecutor.Task(
-                        node,
-                        "dummy reason",
-                        ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); })
-                    )
+        return runTasks(
+            joinTaskExecutor,
+            clusterState,
+            List.of(
+                JoinTask.completingElection(
+                    nodes.stream()
+                        .map(
+                            node -> new JoinTask.NodeJoinTask(
+                                node,
+                                "dummy reason",
+                                ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); })
+                            )
+                        )
                 )
-                .toList()
+            )
         );
-
-        return runTasks(joinTaskExecutor, clusterState, joinNodes);
     }

     public ClusterState removeNodes(ClusterState clusterState, List<DiscoveryNode> nodes) {
@@ -474,11 +474,12 @@ private <Request extends MasterNodeRequest<Request>, Response extends ActionResp
     @SuppressWarnings("unchecked")
     private ClusterState executeClusterStateUpdateTask(ClusterState state, Runnable runnable) {
         ClusterState[] resultingState = new ClusterState[1];
+        doCallRealMethod().when(clusterService).submitStateUpdateTask(anyString(), any(ClusterStateUpdateTask.class), any());
         doAnswer(invocationOnMock -> {
-            ClusterStateUpdateTask task = (ClusterStateUpdateTask) invocationOnMock.getArguments()[1];
-            ClusterStateTaskExecutor<ClusterStateUpdateTask> executor = (ClusterStateTaskExecutor<ClusterStateUpdateTask>) invocationOnMock
-                .getArguments()[2];
-            ClusterTasksResult<ClusterStateUpdateTask> result = executor.execute(state, List.of(task));
+            ClusterStateTaskListener task = (ClusterStateTaskListener) invocationOnMock.getArguments()[1];
+            ClusterStateTaskExecutor<ClusterStateTaskListener> executor = (ClusterStateTaskExecutor<
+                ClusterStateTaskListener>) invocationOnMock.getArguments()[3];
+            ClusterTasksResult<ClusterStateTaskListener> result = executor.execute(state, List.of(task));
             for (TaskResult taskResult : result.executionResults().values()) {
                 if (taskResult.isSuccess() == false) {
                     throw taskResult.getFailure();
@@ -486,7 +487,8 @@ private ClusterState executeClusterStateUpdateTask(ClusterState state, Runnable
             }
             resultingState[0] = result.resultingState();
             return null;
-        }).when(clusterService).submitStateUpdateTask(anyString(), any(ClusterStateUpdateTask.class), any());
+        }).when(clusterService)
+            .submitStateUpdateTask(anyString(), any(ClusterStateTaskListener.class), any(ClusterStateTaskConfig.class), any());
         runnable.run();
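         // the stubbed submitStateUpdateTask above runs the executor synchronously, so the resulting state is captured by this point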
         assertThat(resultingState[0], notNullValue());
         return resultingState[0];
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
index d9bb3aa220317..1aa7902112710 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
@@ -463,7 +463,7 @@ public ClusterState randomlyUpdateClusterState(
         if (randomBoolean()) {
             // add node
             if (state.nodes().getSize() < 10) {
-                state = cluster.addNodes(state, Collections.singletonList(createNode()));
+                state = cluster.addNode(state, createNode());
                 updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
             }
         } else {
@@ -476,7 +476,7 @@ public ClusterState randomlyUpdateClusterState(
             }
             if (randomBoolean()) {
                 // and add it back
-                state = cluster.addNodes(state, Collections.singletonList(discoveryNode));
+                state = cluster.addNode(state, discoveryNode);
                 updateNodes(state, clusterStateServiceMap, indicesServiceSupplier);
             }
         }
@@ -525,7 +525,8 @@ private IndicesClusterStateService createIndicesClusterStateService(
             transportService,
             Collections.emptyMap(),
             Collections.emptyMap(),
-            threadPool
+            threadPool,
+            List.of()
         );
         final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService(
             threadPool,
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java
index 9525055b13b2d..d4ec767e5817c 100644
--- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.common.component.LifecycleListener;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
 import org.elasticsearch.index.store.Store;
@@ -65,7 +66,9 @@ public class RepositoriesServiceTests extends ESTestCase {
     @Override
     public void setUp() throws Exception {
         super.setUp();
+        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
         ThreadPool threadPool = mock(ThreadPool.class);
+        when(threadPool.getThreadContext()).thenReturn(threadContext);
         final TransportService transportService = new TransportService(
             Settings.EMPTY,
             mock(Transport.class),
@@ -93,7 +96,8 @@ public void setUp() throws Exception {
             transportService,
             typesRegistry,
             typesRegistry,
-            threadPool
+            threadPool,
+            List.of()
         );
         repositoriesService.start();
     }
diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
index 11ae7975d8106..f8f65f6ca0f4d 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -584,6 +584,11 @@ private static TestPipelineAggregationBuilder fromXContent(String name, XContent

         @Override
         protected void validate(ValidationContext context) {}
+
+        @Override
+        public Version getMinimalSupportedVersion() {
+            return Version.V_EMPTY;
+        }
     }

     /**
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java
index 6af3a3d06f047..646604b0cc167 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.search.aggregations;

 import org.apache.lucene.util.SetOnce;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
@@ -345,6 +346,11 @@ public String getWriteableName() {
             return "rewritten";
         }

+        @Override
+        public Version getMinimalSupportedVersion() {
+            return Version.V_EMPTY;
+        }
+
         @Override
         protected void doWriteTo(StreamOutput out) throws IOException {
             throw new UnsupportedOperationException();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java
index d6876d135bff0..d7acbf3d51c98 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java
@@ -33,6 +33,7 @@
 import java.time.Instant;
 import java.time.ZoneOffset;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
@@ -43,6 +44,10 @@

 public class IpPrefixAggregatorTests extends AggregatorTestCase {

+    private static final Comparator<InternalIpPrefix.Bucket> IP_ADDRESS_KEY_COMPARATOR = Comparator.comparing(
+        InternalIpPrefix.Bucket::getKeyAsString
+    );
+
     private static final class TestIpDataHolder {
         private final String ipAddressAsString;
         private final InetAddress ipAddress;
@@ -212,6 +217,10 @@ public void testIpv4Addresses() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(1L, 1L, 4L, 1L)
+            );
         }, fieldType);
     }

@@ -261,6 +270,10 @@ public void testIpv6Addresses() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(2L, 1L, 2L)
+            );
         }, fieldType);
     }

@@ -313,6 +326,10 @@ public void testZeroPrefixLength() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of((long) ipAddresses.size())
+            );
         }, fieldType);
     }

@@ -365,6 +382,10 @@ public void testIpv4MaxPrefixLength() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
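             // the two containsAll assertions below check that the bucket keys equal the expected subnets as sets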
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(1L, 1L, 1L, 2L, 1L, 1L)
+            );
         }, fieldType);
     }

@@ -414,6 +435,10 @@ public void testIpv6MaxPrefixLength() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(1L, 1L, 1L, 1L, 1L)
+            );
         }, fieldType);
     }

@@ -471,6 +496,10 @@ public void testAggregateOnIpv4Field() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(1L, 1L, 4L, 1L)
+            );
         }, fieldTypes);
     }

@@ -525,6 +554,10 @@ public void testAggregateOnIpv6Field() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(2L, 1L, 2L)
+            );
         }, fieldTypes);
     }

@@ -898,11 +931,12 @@ public void testMinDocCount() throws IOException {
         // GIVEN
         final int prefixLength = 16;
         final String field = "ipv4";
+        int minDocCount = 2;
         final IpPrefixAggregationBuilder aggregationBuilder = new IpPrefixAggregationBuilder("ip_prefix").field(field)
             .isIpv6(false)
             .keyed(randomBoolean())
             .appendPrefixLength(false)
-            .minDocCount(2)
+            .minDocCount(minDocCount)
             .prefixLength(prefixLength);
         final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field);
         final List<TestIpDataHolder> ipAddresses = List.of(
@@ -941,6 +975,13 @@ public void testMinDocCount() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertTrue(
+                ipPrefix.getBuckets().stream().map(InternalIpPrefix.Bucket::getDocCount).allMatch(docCount -> docCount >= minDocCount)
+            );
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(4L)
+            );
         }, fieldType);
     }

@@ -1002,6 +1043,10 @@ public void testAggregationWithQueryFilter() throws IOException {
             assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size());
             assertTrue(ipAddressesAsString.containsAll(expectedSubnets));
             assertTrue(expectedSubnets.containsAll(ipAddressesAsString));
+            assertEquals(
+                ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(),
+                List.of(4L)
+            );
         }, fieldType);
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
index fb4f6730728c4..67a385985657c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.test.InternalAggregationTestCase;

 import java.util.ArrayList;
@@ -41,6 +42,16 @@ protected void assertReduced(InternalAvg reduced, List<InternalAvg> inputs) {
         assertEquals(sum / counts, reduced.value(), 0.0000001);
     }

+    @Override
+    protected boolean supportsSampling() {
+        return true;
+    }
+
+    @Override
+    protected void assertSampled(InternalAvg sampled, InternalAvg reduced, SamplingContext samplingContext) {
+        assertEquals(sampled.value(), reduced.value(), 1e-12);
+    }
+
     public void testSummationAccuracy() {
         double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 };
         verifyAvgOfDoubles(values, 0.9, 0d);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
index b1c37581f128e..6df82fa0721d8 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.test.InternalAggregationTestCase;

 import java.util.ArrayList;
@@ -85,6 +86,21 @@ protected void assertReduced(InternalExtendedStats reduced, List<InternalExtendedStats> inputs) {
 public class InternalHDRPercentileRanksTests extends InternalPercentilesRanksTestCase<InternalHDRPercentileRanks> {

     @Override
@@ -45,6 +49,20 @@ protected void assertReduced(InternalHDRPercentileRanks reduced, List<InternalHDRPercentileRanks> inputs) {
+    @Override
+    protected boolean supportsSampling() {
+        return true;
+    }
+
+    @Override
+    protected void assertSampled(InternalHDRPercentileRanks sampled, InternalHDRPercentileRanks reduced, SamplingContext samplingContext) {
+        Iterator<Percentile> it1 = sampled.iterator();
+        Iterator<Percentile> it2 = reduced.iterator();
+        while (it1.hasNext() && it2.hasNext()) {
+            assertThat(it1.next(), equalTo(it2.next()));
+        }
+    }
+
     @Override
     protected Class implementationClass() {
         return ParsedHDRPercentileRanks.class;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
index 8b47852ff5736..6e986f69c1c50 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
@@ -11,6 +11,7 @@
 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.SamplingContext;

 import java.util.Arrays;
 import java.util.HashMap;
@@ -19,6 +20,7 @@
 import java.util.Map;

 import static java.util.Collections.emptyMap;
+import static org.hamcrest.Matchers.equalTo;

 public class InternalHDRPercentilesTests extends InternalPercentilesTestCase<InternalHDRPercentiles> {

@@ -48,6 +50,20 @@ protected void assertReduced(InternalHDRPercentiles reduced, List<InternalHDRPercentiles> inputs) {
+    @Override
+    protected boolean supportsSampling() {
+        return true;
+    }
+
+    @Override
+    protected void assertSampled(InternalHDRPercentiles sampled, InternalHDRPercentiles reduced, SamplingContext samplingContext) {
+        Iterator<Percentile> it1 = sampled.iterator();
+        Iterator<Percentile> it2 = reduced.iterator();
+        while (it1.hasNext() && it2.hasNext()) {
+            assertThat(it1.next(), equalTo(it2.next()));
+        }
+    }
+
     @Override
     protected Class implementationClass() {
         return ParsedHDRPercentiles.class;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
index 6098f6fc66d96..af922a36db612 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
@@ -11,12 +11,15 @@
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.test.InternalAggregationTestCase;

 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import static org.hamcrest.Matchers.equalTo;
+
 public class InternalMaxTests extends InternalAggregationTestCase<InternalMax> {

     @Override
@@ -31,6 +34,16 @@ protected void assertReduced(InternalMax reduced, List<InternalMax> inputs) {
         assertEquals(inputs.stream().mapToDouble(InternalMax::value).max().getAsDouble(), reduced.value(), 0);
     }

+    @Override
+    protected boolean supportsSampling() {
+        return true;
+    }
+
+    @Override
+    protected void assertSampled(InternalMax sampled, InternalMax reduced, SamplingContext samplingContext) {
+        assertThat(sampled.getValue(), equalTo(reduced.getValue()));
+    }
+
     @Override
     protected void assertFromXContent(InternalMax max, ParsedAggregation parsedAggregation) {
         ParsedMax parsed = ((ParsedMax) parsedAggregation);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java
index 67f0da1c735bc..cd5afc42e863d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java
@@ -10,6 +10,7 @@

 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.test.InternalAggregationTestCase;

 import java.io.IOException;
@@ -17,6 +18,8 @@
 import java.util.List;
 import java.util.Map;

+import static org.hamcrest.Matchers.equalTo;
+
 public class InternalMedianAbsoluteDeviationTests extends InternalAggregationTestCase<InternalMedianAbsoluteDeviation> {

     @Override
@@ -47,6 +50,20 @@ protected void assertReduced(InternalMedianAbsoluteDeviation reduced, List<InternalMedianAbsoluteDeviation> inputs) {
 public class InternalMinTests extends InternalAggregationTestCase<InternalMin> {

     @Override
     protected InternalMin createTestInstance(String name, Map<String, Object> metadata) {
@@ -30,6 +33,16 @@ protected void assertReduced(InternalMin reduced, List<InternalMin> inputs) {
         assertEquals(inputs.stream().mapToDouble(InternalMin::value).min().getAsDouble(), reduced.value(), 0);
     }

+    @Override
+    protected boolean supportsSampling() {
+        return true;
+    }
+
+    @Override
+    protected void assertSampled(InternalMin sampled, InternalMin reduced, SamplingContext samplingContext) {
+        assertThat(sampled.getValue(), equalTo(reduced.getValue()));
+    }
+
     @Override
     protected void assertFromXContent(InternalMin min, ParsedAggregation parsedAggregation) {
         ParsedMin parsed = ((ParsedMin) parsedAggregation);
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index df98c4500bb59..000c66d44f521 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.VersionUtils; @@ -161,6 +162,16 @@ protected void assertReduced(InternalScriptedMetric reduced, List inputs) assertEquals(expectedMax, reduced.getMax(), 0d); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalStats sampled, InternalStats reduced, SamplingContext samplingContext) { + assertEquals(sampled.getCount(), samplingContext.inverseScale(reduced.getCount())); + assertEquals(sampled.getSum(), samplingContext.inverseScale(reduced.getSum()), 1e-7); + assertEquals(sampled.getMin(), reduced.getMin(), 0d); + assertEquals(sampled.getMax(), reduced.getMax(), 0d); + } + public void testSummationAccuracy() { double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyStatsOfDoubles(values, 13.5, 0.9, 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java index 7b28517dae453..27d3fafee1319 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java @@ -10,12 +10,16 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override @@ -57,6 +61,24 @@ protected void assertReduced(InternalTDigestPercentileRanks reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedTDigestPercentileRanks.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java index b6894d340a20e..5c8e6aa2700c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.Maps; 
import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.util.Arrays; import java.util.HashMap; @@ -18,6 +19,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.equalTo; public class InternalTDigestPercentilesTests extends InternalPercentilesTestCase { @@ -55,6 +57,20 @@ protected void assertReduced(InternalTDigestPercentiles reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedTDigestPercentiles.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index ba63452e56472..296a9c8d6ab5c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -54,6 +55,7 @@ import static java.lang.Math.min; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.toList; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public class InternalTopHitsTests extends InternalAggregationTestCase { @@ -271,6 +273,16 @@ protected void assertReduced(InternalTopHits reduced, List inpu assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalTopHits sampled, InternalTopHits reduced, SamplingContext samplingContext) { + assertThat(sampled.getHits(), equalTo(reduced.getHits())); + } + /** * Assert that two objects are equals, calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)} to print out their * differences if they aren't equal. 
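A small observation on the percentile tests above: the pairwise `while (it1.hasNext() && it2.hasNext())` comparison stops at whichever iterator runs out first, so it would pass silently if `finalizeSampling` ever produced fewer percentiles than the reduced result. A minimal sketch of a stricter helper (hypothetical, not part of this diff; it assumes the same hamcrest and JUnit assertions these test classes already have in scope) looks like this:

    // Hypothetical helper, not in the PR: compares two iterators element by
    // element and additionally fails if one side yields more elements than
    // the other, which the in-line while loops above would not detect.
    static <T> void assertIteratorsEqual(Iterator<T> sampled, Iterator<T> reduced) {
        while (sampled.hasNext() && reduced.hasNext()) {
            assertThat(sampled.next(), equalTo(reduced.next()));
        }
        assertFalse("sampled iterator yielded extra elements", sampled.hasNext());
        assertFalse("reduced iterator yielded extra elements", reduced.hasNext());
    }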
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java index 31ebb152adcbc..618d9bec1ef1c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java @@ -10,12 +10,15 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalValueCountTests extends InternalAggregationTestCase { @Override @@ -28,6 +31,16 @@ protected void assertReduced(InternalValueCount reduced, List { @Override @@ -44,6 +47,16 @@ protected void assertReduced(InternalWeightedAvg reduced, List metadata) { return new Sum(name, value, formatter, metadata); } + @Override + protected boolean supportsSampling() { + return true; + } + @Override protected void assertReduced(Sum reduced, List inputs) { double expectedSum = inputs.stream().mapToDouble(Sum::value).sum(); assertEquals(expectedSum, reduced.value(), 0.0001d); } + protected void assertSampled(Sum sampled, Sum reduced, SamplingContext samplingContext) { + assertEquals(sampled.value(), samplingContext.inverseScale(reduced.value()), 1e-7); + } + public void testSummationAccuracy() { // Summing up a normal array and expect an accurate value double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java new file mode 100644 index 0000000000000..a6662873c425c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class SamplingContextTests extends ESTestCase { + protected static final int NUMBER_OF_TEST_RUNS = 20; + + private static SamplingContext randomContext() { + return new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83748") + public void testScaling() { + for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { + SamplingContext samplingContext = randomContext(); + long randomLong = randomLongBetween(100_000_000L, Long.MAX_VALUE); + double randomDouble = randomDouble(); + long rescaled = samplingContext.inverseScale(samplingContext.scale(randomLong)); + // No matter how you scale `long` values, the inverse back may be a little off + long error = (long) (rescaled * 1e-15); + assertThat( + Double.toString(samplingContext.probability()), + rescaled, + allOf(greaterThanOrEqualTo(randomLong - error), lessThanOrEqualTo(randomLong + error)) + ); + assertThat( + Double.toString(samplingContext.probability()), + randomDouble, + closeTo(samplingContext.inverseScale(samplingContext.scale(randomDouble)), 1e-12) + ); + } + } + + public void testNoScaling() { + SamplingContext samplingContext = new SamplingContext(1.0, randomInt()); + long randomLong = randomLong(); + double randomDouble = randomDouble(); + assertThat(randomLong, equalTo(samplingContext.scale(randomLong))); + assertThat(randomDouble, equalTo(samplingContext.scale(randomDouble))); + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java index 93bba946c17bd..ccbd596e911d2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java @@ -17,8 +17,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -26,6 +24,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -33,8 +32,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.SortedMap; -import java.util.TreeMap; import java.util.function.Consumer; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; @@ -80,15 +77,13 @@ public void testStandAloneTimeSeriesWithSum() throws 
IOException { public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { final List<IndexableField> fields = new ArrayList<>(); fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp)); - final SortedMap<String, BytesReference> dimensionFields = new TreeMap<>(); + final TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder(); for (int i = 0; i < dimensions.length; i += 2) { - final BytesReference reference; - if (dimensions[i + 1] instanceof Number) { - reference = TimeSeriesIdFieldMapper.encodeTsidValue(((Number) dimensions[i + 1]).longValue()); + if (dimensions[i + 1] instanceof Number n) { + builder.addLong(dimensions[i].toString(), n.longValue()); } else { - reference = TimeSeriesIdFieldMapper.encodeTsidValue(dimensions[i + 1].toString()); + builder.addString(dimensions[i].toString(), dimensions[i + 1].toString()); } - dimensionFields.put(dimensions[i].toString(), reference); } for (int i = 0; i < metrics.length; i += 2) { if (metrics[i + 1] instanceof Integer || metrics[i + 1] instanceof Long) { @@ -99,13 +94,9 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens fields.add(new DoubleDocValuesField(metrics[i].toString(), (double) metrics[i + 1])); } } - try (BytesStreamOutput out = new BytesStreamOutput()) { - TimeSeriesIdFieldMapper.encodeTsid(out, dimensionFields); - BytesReference timeSeriesId = out.bytes(); - fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, timeSeriesId.toBytesRef())); - } + fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, builder.build().toBytesRef())); // TODO: Handle metrics - iw.addDocument(fields.stream().toList()); + iw.addDocument(fields); } private void timeSeriesTestCase( diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java new file mode 100644 index 0000000000000..b66db7736a7ff --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.search.aggregations.timeseries; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.search.aggregations.BucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class TimeSeriesCancellationTests extends ESTestCase { + + private static Directory dir; + private static IndexReader reader; + + @BeforeClass + public static void setup() throws IOException { + dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + iwc.setIndexSort( + new Sort( + new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING), + new SortField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, SortField.Type.LONG) + ) + ); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + indexRandomDocuments(iw, randomIntBetween(2048, 4096)); + iw.flush(); + reader = iw.getReader(); + iw.close(); + } + + private static void indexRandomDocuments(RandomIndexWriter w, int numDocs) throws IOException { + for (int i = 1; i <= numDocs; ++i) { + Document doc = new Document(); + String tsid = "tsid" + randomIntBetween(0, 30); + long time = randomNonNegativeLong(); + doc.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, new BytesRef(tsid))); + doc.add(new NumericDocValuesField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, time)); + w.addDocument(doc); + } + } + + @AfterClass + public static void cleanup() throws IOException { + IOUtils.close(reader, dir); + dir = null; + reader = null; + } + + public void testLowLevelCancellationActions() throws IOException { + ContextIndexSearcher searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + true + ); + TimeSeriesIndexSearcher timeSeriesIndexSearcher = new TimeSeriesIndexSearcher( + searcher, + List.of(() -> { throw new TaskCancelledException("Cancel"); }) + ); + CountingBucketCollector bc = new CountingBucketCollector(); + expectThrows(TaskCancelledException.class, () -> timeSeriesIndexSearcher.search(new MatchAllDocsQuery(), bc)); + // We count every segment and every record as 1 and break on 2048th iteration counting from 0 + // so we expect to see 2048 - number_of_segments - 1 (-1 is because we check before we collect) + assertThat(bc.count.get(), equalTo(Math.max(0, 2048 - 
reader.leaves().size() - 1))); + } + + public static class CountingBucketCollector extends BucketCollector { + public AtomicInteger count = new AtomicInteger(); + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException { + return new LeafBucketCollector() { + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + count.incrementAndGet(); + } + }; + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java index d680997275414..7bc5a2522d55b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.search.aggregations.BucketCollector; @@ -35,12 +34,12 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83647") public class TimeSeriesIndexSearcherTests extends ESTestCase { // Index a random set of docs with timestamp and tsid with the tsid/timestamp sort order @@ -87,7 +86,7 @@ public void testCollectInOrderAcrossSegments() throws IOException, InterruptedEx IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = new IndexSearcher(reader); - TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher); + TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher, List.of()); BucketCollector collector = new BucketCollector() { @@ -108,13 +107,13 @@ public void collect(int doc, long owningBucketOrd) throws IOException { BytesRef latestTSID = tsid.lookupOrd(tsid.ordValue()); long latestTimestamp = timestamp.longValue(); if (currentTSID != null) { - assertTrue(latestTSID.compareTo(currentTSID) >= 0); + assertTrue(currentTSID + "->" + latestTSID.utf8ToString(), latestTSID.compareTo(currentTSID) >= 0); if (latestTSID.equals(currentTSID)) { - assertTrue(latestTimestamp >= currentTimestamp); + assertTrue(currentTimestamp + "->" + latestTimestamp, latestTimestamp >= currentTimestamp); } } currentTimestamp = latestTimestamp; - currentTSID = latestTSID; + currentTSID = BytesRef.deepCopyOf(latestTSID); total++; } }; diff --git a/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java new file mode 100644 index 0000000000000..21ec0d4584f91 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.lookup; + +import org.apache.lucene.codecs.StoredFieldsReader; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.store.Directory; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + +public class SourceLookupTests extends ESTestCase { + + public void testSetSegmentAndDocument() throws IOException { + try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) { + Document doc = new Document(); + doc.add(new StringField("field", "value", Field.Store.YES)); + iw.addDocument(doc); + + try (IndexReader reader = iw.getReader()) { + LeafReaderContext readerContext = reader.leaves().get(0); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSegmentAndDocument(readerContext, 42); + sourceLookup.setSource( + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()) + ); + assertNotNull(sourceLookup.internalSourceRef()); + + // Source should be preserved if we pass in the same reader and document + sourceLookup.setSegmentAndDocument(readerContext, 42); + assertNotNull(sourceLookup.internalSourceRef()); + + // Check that the stored fields reader is not loaded eagerly + LeafReader throwingReader = new SequentialStoredFieldsLeafReader(readerContext.reader()) { + @Override + protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { + throw new UnsupportedOperationException("attempted to load stored fields reader"); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } + + @Override + public CacheHelper getCoreCacheHelper() { + return in.getCoreCacheHelper(); + } + }; + + sourceLookup.setSegmentAndDocument(throwingReader.getContext(), 0); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, sourceLookup::source); + assertThat(e.getCause(), instanceOf(UnsupportedOperationException.class)); + assertThat(e.getCause().getMessage(), containsString("attempted to load stored fields reader")); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..7c7d5f6395f43 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; +import static org.elasticsearch.repositories.RepositoryData.CORRUPTED_REPO_GEN; +import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { + + public void testIsGreenWhenAllRepositoriesAreNotCorrupted() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo( + new HealthIndicatorResult( + NAME, + SNAPSHOT, + GREEN, + "No corrupted repositories.", + new SimpleHealthIndicatorDetails(Map.of("total_repositories", repos.size())) + ) + ) + ); + } + + public void testIsRedWhenAtLeastOneRepoIsCorrupted() { + var repos = appendToCopy( + randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)), + createRepositoryMetadata("corrupted-repo", true) + ); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo( + new HealthIndicatorResult( + NAME, + SNAPSHOT, + RED, + "Detected [1] corrupted repositories: [corrupted-repo].", + new SimpleHealthIndicatorDetails( + Map.of("total_repositories", repos.size(), "corrupted_repositories", 1, "corrupted", List.of("corrupted-repo")) + ) + ) + ) + ); + } + + public void testIsGreenWhenNoMetadata() { + var clusterState = createClusterStateWith(null); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo(new HealthIndicatorResult(NAME, SNAPSHOT, GREEN, "No repositories configured.", HealthIndicatorDetails.EMPTY)) + ); + } + + private static ClusterState createClusterStateWith(RepositoriesMetadata metadata) { + var builder = ClusterState.builder(new ClusterName("test-cluster")); + if (metadata != null) { + 
builder.metadata(Metadata.builder().putCustom(RepositoriesMetadata.TYPE, metadata)); + } + return builder.build(); + } + + private static RepositoryMetadata createRepositoryMetadata(String name, boolean corrupted) { + return new RepositoryMetadata(name, "uuid", "s3", Settings.EMPTY, corrupted ? CORRUPTED_REPO_GEN : EMPTY_REPO_GEN, EMPTY_REPO_GEN); + } + + private static RepositoryIntegrityHealthIndicatorService createRepositoryCorruptionHealthIndicatorService(ClusterState clusterState) { + var clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(clusterState); + return new RepositoryIntegrityHealthIndicatorService(clusterService); + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b0325195bc17b..9b4a06afa705a 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1715,7 +1715,8 @@ protected void assertSnapshotOrGenericThread() { } ), emptyMap(), - threadPool + threadPool, + List.of() ); final ActionFilters actionFilters = new ActionFilters(emptySet()); snapshotsService = new SnapshotsService( diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java index 9e8fc5c8983a6..6e40e9434141e 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java @@ -46,6 +46,7 @@ import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.in; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TaskManagerTests extends ESTestCase { private ThreadPool threadPool; @@ -76,7 +77,9 @@ public void testResultsServiceRetryTotalTime() { public void testTrackingChannelTask() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) { assertThat(reason, equalTo("channel was closed")); @@ -124,7 +127,9 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF public void testTrackingTaskAndCloseChannelConcurrently() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) { assertTrue("task [" + task + "] was cancelled already", 
cancelledTasks.add(task)); @@ -180,7 +185,9 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF public void testRemoveBansOnChannelDisconnects() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) {} }); diff --git a/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java b/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java index 2bdfa3cc7865c..2d9fa940d5d5a 100644 --- a/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ResultDeduplicator; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -29,8 +31,11 @@ public void testRequestDeduplication() throws Exception { @Override public void setParentTask(final TaskId taskId) {} }; - final ResultDeduplicator deduplicator = new ResultDeduplicator<>(); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final ResultDeduplicator deduplicator = new ResultDeduplicator<>(threadContext); final SetOnce> listenerHolder = new SetOnce<>(); + final var headerName = "thread-context-header"; + final var headerGenerator = new AtomicInteger(); int iterationsPerThread = scaledRandomIntBetween(100, 1000); Thread[] threads = new Thread[between(1, 4)]; Phaser barrier = new Phaser(threads.length + 1); @@ -38,18 +43,24 @@ public void setParentTask(final TaskId taskId) {} threads[i] = new Thread(() -> { barrier.arriveAndAwaitAdvance(); for (int n = 0; n < iterationsPerThread; n++) { - deduplicator.executeOnce(request, new ActionListener() { - @Override - public void onResponse(Void aVoid) { - successCount.incrementAndGet(); - } + final var headerValue = Integer.toString(headerGenerator.incrementAndGet()); + try (var ignored = threadContext.stashContext()) { + threadContext.putHeader(headerName, headerValue); + deduplicator.executeOnce(request, new ActionListener<>() { + @Override + public void onResponse(Void aVoid) { + assertThat(threadContext.getHeader(headerName), equalTo(headerValue)); + successCount.incrementAndGet(); + } - @Override - public void onFailure(Exception e) { - assertThat(e, sameInstance(failure)); - failureCount.incrementAndGet(); - } - }, (req, reqListener) -> listenerHolder.set(reqListener)); + @Override + public void onFailure(Exception e) { + assertThat(threadContext.getHeader(headerName), equalTo(headerValue)); + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }, (req, reqListener) -> listenerHolder.set(reqListener)); + } } }); threads[i].start(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java 
b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 584a2a130afac..3e81b71292390 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -289,6 +289,10 @@ public static ClusterState getClusterStateWithDataStreams( boolean replicated ) { Metadata.Builder builder = Metadata.builder(); + builder.put( + "template_1", + new ComposableIndexTemplate(List.of("*"), null, null, null, null, null, new ComposableIndexTemplate.DataStreamTemplate()) + ); List<IndexMetadata> allIndices = new ArrayList<>(); for (Tuple<String, Integer> dsTuple : dataStreams) { diff --git a/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java index 5d01eb6031134..4a3b3d27c75ab 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -55,7 +55,7 @@ protected static Builder builderFor(String file) { .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream) ) { // copyCurrentStructure does not properly handle filters when it is passed a json parser. So we hide it. 
- return builder.copyCurrentStructure(new FilterXContentParser(parser) { + return builder.copyCurrentStructure(new FilterXContentParserWrapper(parser) { }); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 6f9e3649ae679..21aa106ff75c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -126,6 +126,14 @@ protected final DocumentMapper createDocumentMapper(XContentBuilder mappings) th return createMapperService(mappings).documentMapper(); } + protected final DocumentMapper createTimeSeriesModeDocumentMapper(XContentBuilder mappings) throws IOException { + Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), "time_series") + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "uid") + .build(); + return createMapperService(settings, mappings).documentMapper(); + } + protected final DocumentMapper createDocumentMapper(Version version, XContentBuilder mappings) throws IOException { return createMapperService(version, mappings).documentMapper(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java index 4eeb644132a3c..b42d561de4d72 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -31,7 +33,7 @@ public class TestDocumentParserContext extends DocumentParserContext { * Use with caution as it can cause {@link NullPointerException}s down the line. 
*/ public TestDocumentParserContext() { - super(MappingLookup.EMPTY, null, null, null, null); + super(MappingLookup.EMPTY, MapperTestCase.createIndexSettings(Version.CURRENT, Settings.EMPTY), null, null, null); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 47e12cd4934d4..c61bd0c2799ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetadata; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; @@ -417,7 +419,9 @@ public static ClusterService mockClusterService(RepositoryMetadata metadata) { } private static ClusterService mockClusterService(ClusterState initialState) { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); when(threadPool.executor(ThreadPool.Names.SNAPSHOT)).thenReturn(new SameThreadExecutorService()); when(threadPool.generic()).thenReturn(new SameThreadExecutorService()); when(threadPool.info(ThreadPool.Names.SNAPSHOT)).thenReturn( diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 36062d69ca401..dfdfd267373b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -123,6 +123,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationContext.ProductionAggregationContext; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -577,7 +578,7 @@ private A searchAndReduce( C a = createAggregator(builder, context); a.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(subSearcher).search(rewritten, a); + new TimeSeriesIndexSearcher(subSearcher, List.of()).search(rewritten, a); } else { Weight weight = subSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); subSearcher.search(weight, a); @@ -588,7 +589,7 @@ private A searchAndReduce( } else { root.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(searcher).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); + new TimeSeriesIndexSearcher(searcher, List.of()).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); } else { searcher.search(rewritten, 
MultiBucketCollector.wrap(true, List.of(root))); } @@ -1013,7 +1014,19 @@ public void testSupportedFieldTypes() throws IOException { // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized AssertionError failure = null; try { - searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType); + InternalAggregation internalAggregation = searchAndReduce( + indexSearcher, + new MatchAllDocsQuery(), + aggregationBuilder, + fieldType + ); + // We should make sure if the builder says it supports sampling, that the internal aggregations returned override + // finalizeSampling + if (aggregationBuilder.supportsSampling()) { + SamplingContext randomSamplingContext = new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + InternalAggregation sampledResult = internalAggregation.finalizeSampling(randomSamplingContext); + assertThat(sampledResult.getClass(), equalTo(internalAggregation.getClass())); + } if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(fieldType.typeName())) { failure = new AssertionError( "Aggregator [" diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index faca8ead9f5e6..4c9e43900b2f0 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -334,6 +334,10 @@ public static void createRepository(Logger logger, String repoName, String type) createRepository(logger, repoName, type, randomRepositorySettings(), true); } + protected void deleteRepository(String repoName) { + assertAcked(client().admin().cluster().prepareDeleteRepository(repoName)); + } + public static Settings.Builder randomRepositorySettings() { final Settings.Builder settings = Settings.builder(); settings.put("location", randomRepoPath()).put("compress", randomBoolean()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index fbee26415c388..a7765ff11c8fa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1627,6 +1627,11 @@ public static void setClusterReadOnly(boolean value) { assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings).get()); } + /** Sets cluster persistent settings **/ + public void updateClusterSettings(Settings.Builder persistentSettings) { + assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(persistentSettings).get()); + } + private static CountDownLatch newLatch(List latches) { CountDownLatch l = new CountDownLatch(1); latches.add(l); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 3645298543e0c..69b4000caaeea 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -12,6 +12,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import 
org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.template.delete.DeleteComponentTemplateAction; +import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; @@ -127,6 +130,21 @@ public void tearDown() throws Exception { assertThat(searchService.getActiveContexts(), equalTo(0)); assertThat(searchService.getOpenScrollContexts(), equalTo(0)); super.tearDown(); + var deleteDataStreamsRequest = new DeleteDataStreamAction.Request("*"); + deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); + try { + assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest).actionGet()); + } catch (IllegalStateException e) { + // Ignore if action isn't registered, because data streams is a module and + // if the delete action isn't registered then there are no data streams to delete. + if (e.getMessage().startsWith("failed to find action") == false) { + throw e; + } + } + var deleteComposableIndexTemplateRequest = new DeleteComposableIndexTemplateAction.Request("*"); + assertAcked(client().execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteComposableIndexTemplateRequest).actionGet()); + var deleteComponentTemplateRequest = new DeleteComponentTemplateAction.Request("*"); + assertAcked(client().execute(DeleteComponentTemplateAction.INSTANCE, deleteComponentTemplateRequest).actionGet()); assertAcked( client().admin().indices().prepareDelete("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get() ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index f40af339af193..95ecf3f3a24fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -142,6 +142,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.aggregations.timeseries.ParsedTimeSeries; import org.elasticsearch.search.aggregations.timeseries.TimeSeriesAggregationBuilder; import org.elasticsearch.xcontent.ContextParser; @@ -472,6 +473,12 @@ public void testReduceRandom() throws IOException { T reduced = (T) inputs.toReduce().get(0).reduce(toReduce, context); doAssertReducedMultiBucketConsumer(reduced, bucketConsumer); assertReduced(reduced, inputs.toReduce()); + if (supportsSampling()) { + SamplingContext randomContext = new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + @SuppressWarnings("unchecked") + T sampled = (T) reduced.finalizeSampling(randomContext); + assertSampled(sampled, reduced, randomContext); + } } protected void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) { @@ -487,11 +494,19 @@ protected ScriptService mockScriptService() { protected abstract void 
assertReduced(T reduced, List inputs); + protected void assertSampled(T sampled, T reduced, SamplingContext samplingContext) { + throw new UnsupportedOperationException("aggregation supports sampling but does not implement assertSampled"); + } + @Override public final T createTestInstance() { return createTestInstance(randomAlphaOfLength(5)); } + protected boolean supportsSampling() { + return false; + } + public final Map createTestMetadata() { Map metadata = null; if (randomBoolean()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index ce99a6e2c92ec..371d7302a61f7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -43,9 +43,13 @@ private XContentTestUtils() { public static Map convertToMap(ToXContent part) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - part.toXContent(builder, EMPTY_PARAMS); - builder.endObject(); + if (part.isFragment()) { + builder.startObject(); + part.toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + } else { + part.toXContent(builder, EMPTY_PARAMS); + } return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a4e338626191d..245593cc5e76d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1407,6 +1407,24 @@ public static void assertAcknowledged(Response response) throws IOException { assertThat(jsonBody, containsString("\"acknowledged\":true")); } + /** + * Updates the cluster with the provided settings (as persistent settings) + **/ + public static void updateClusterSettings(Settings settings) throws IOException { + updateClusterSettings(client(), settings); + } + + /** + * Updates the cluster with the provided settings (as persistent settings) + **/ + public static void updateClusterSettings(RestClient client, Settings settings) throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + String entity = "{ \"persistent\":" + Strings.toString(settings) + "}"; + request.setJsonEntity(entity); + Response response = client.performRequest(request); + assertOK(response); + } + /** * Permits subclasses to increase the default timeout when waiting for green health */ @@ -1440,6 +1458,10 @@ public static void ensureHealth(String index, Consumer requestConsumer) ensureHealth(client(), index, requestConsumer); } + public static void ensureHealth(RestClient restClient, Consumer requestConsumer) throws IOException { + ensureHealth(restClient, "", requestConsumer); + } + protected static void ensureHealth(RestClient restClient, String index, Consumer requestConsumer) throws IOException { Request request = new Request("GET", "/_cluster/health" + (index.isBlank() ? 
"" : "/" + index)); requestConsumer.accept(request); @@ -1604,7 +1626,11 @@ protected static Map getAlias(final String index, final String a } protected static Map getAsMap(final String endpoint) throws IOException { - Response response = client().performRequest(new Request("GET", endpoint)); + return getAsMap(client(), endpoint); + } + + protected static Map getAsMap(RestClient client, final String endpoint) throws IOException { + Response response = client.performRequest(new Request("GET", endpoint)); return responseAsMap(response); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index bdd8ba9dab1df..86121fa0d7da0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -87,13 +87,20 @@ public String getReasonPhrase() { * Get a list of all of the values of all warning headers returned in the response. */ public List getWarningHeaders() { - List warningHeaders = new ArrayList<>(); + return getHeaders("Warning"); + } + + /** + * Get a list of all the values of a given header returned in the response. + */ + public List getHeaders(String name) { + List headers = new ArrayList<>(); for (Header header : response.getHeaders()) { - if (header.getName().equals("Warning")) { - warningHeaders.add(header.getValue()); + if (header.getName().equalsIgnoreCase(name)) { + headers.add(header.getValue()); } } - return warningHeaders; + return headers; } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index e70434f7225e4..a58c4e21e530c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -447,7 +447,7 @@ public void test() throws IOException { inFipsJvm() && testCandidate.getTestSection().getSkipSection().getFeatures().contains("fips_140") ); - if (testCandidate.getSetupSection().isEmpty() == false) { + if (skipSetupSections() == false && testCandidate.getSetupSection().isEmpty() == false) { logger.debug("start setup test [{}]", testCandidate.getTestPath()); for (ExecutableSection executableSection : testCandidate.getSetupSection().getExecutableSections()) { executeSection(executableSection); @@ -470,6 +470,10 @@ public void test() throws IOException { } } + protected boolean skipSetupSections() { + return false; + } + /** * Execute an {@link ExecutableSection}, careful to log its place of origin on failure. */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 23a7146561da9..efc53b08fad27 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -367,6 +367,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx final String testPath = executionContext.getClientYamlTestCandidate() != null ? 
executionContext.getClientYamlTestCandidate().getTestPath() : null; + checkElasticProductHeader(response.getHeaders("X-elastic-product")); checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); @@ -392,6 +393,31 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx } } + void checkElasticProductHeader(final List productHeaders) { + if (productHeaders.isEmpty()) { + fail("Response is missing required X-Elastic-Product response header"); + } + boolean headerPresent = false; + final List unexpected = new ArrayList<>(); + for (String header : productHeaders) { + if (header.equals("Elasticsearch")) { + headerPresent = true; + break; + } else { + unexpected.add(header); + } + } + if (headerPresent == false) { + StringBuilder failureMessage = new StringBuilder(); + appendBadHeaders( + failureMessage, + unexpected, + "did not get expected product header [Elasticsearch], found header" + (unexpected.size() > 1 ? "s" : "") + ); + fail(failureMessage.toString()); + } + } + void checkWarningHeaders(final List warningHeaders) { checkWarningHeaders(warningHeaders, null); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index fdd3451012d5c..b7238588ffe36 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -605,6 +605,7 @@ public void testNodeSelectorByVersion() throws IOException { doSection.getApiCallSection().getNodeSelector() ) ).thenReturn(mockResponse); + when(mockResponse.getHeaders("X-elastic-product")).thenReturn(List.of("Elasticsearch")); doSection.execute(context); verify(context).callApi( "indices.get_field_mapping", diff --git a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml index 5e08112253ef0..053370bad7e50 100644 --- a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml +++ b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml @@ -5,14 +5,14 @@ index: index: test_1 type: test - id: 1 + id: "1" body: { "foo": "bar" } - do: get: index: test_1 type: _all - id: 1 + id: "1" - match: { _index: test_1 } - match: { _type: test } diff --git a/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml index 745e111740285..7594da3703fff 100644 --- a/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml +++ b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml @@ -5,7 +5,7 @@ index: index: test-weird-index-中文 type: weird.type - id: 1 + id: "1" body: { foo: bar } - match: { _index: test-weird-index-中文 } @@ -17,7 +17,7 @@ get: index: test-weird-index-中文 type: weird.type - id: 1 + id: "1" - match: { _index: test-weird-index-中文 } - match: { _type: weird.type } diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 1486ec7c8afa6..482c94e06195b 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -1,3 +1,6 @@ +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 
'elasticsearch.docs-test' apply plugin: 'elasticsearch.rest-resources' @@ -60,6 +63,9 @@ testClusters.matching { it.name == "integTest" }.configureEach { setting 'xpack.security.authc.realms.saml.saml1.sp.acs', 'https://kibana.org/api/security/saml/callback' setting 'xpack.security.authc.realms.saml.saml1.attributes.principal', 'uid' setting 'xpack.security.authc.realms.saml.saml1.attributes.name', 'urn:oid:2.5.4.3' + + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") + user username: 'test_admin' } diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/x-pack/docs/en/rest-api/security.asciidoc index e3e4a47f17b32..78f4f3db00cd9 100644 --- a/x-pack/docs/en/rest-api/security.asciidoc +++ b/x-pack/docs/en/rest-api/security.asciidoc @@ -132,7 +132,18 @@ communicate with a secured {es} cluster. * <> * <> +[discrete] +[[security-user-profile-apis]] +=== User Profile + +Use the following APIs to retrieve and manage user profiles. + +* <> +* <> +* <> + +include::security/activate-user-profile.asciidoc[] include::security/authenticate.asciidoc[] include::security/change-password.asciidoc[] include::security/clear-cache.asciidoc[] @@ -165,6 +176,7 @@ include::security/get-service-accounts.asciidoc[] include::security/get-service-credentials.asciidoc[] include::security/get-tokens.asciidoc[] include::security/get-user-privileges.asciidoc[] +include::security/get-user-profile.asciidoc[] include::security/get-users.asciidoc[] include::security/grant-api-keys.asciidoc[] include::security/has-privileges.asciidoc[] @@ -181,6 +193,4 @@ include::security/saml-invalidate-api.asciidoc[] include::security/saml-complete-logout-api.asciidoc[] include::security/saml-sp-metadata.asciidoc[] include::security/ssl.asciidoc[] -include::security/get-user-profile.asciidoc[] -include::security/activate-user-profile.asciidoc[] include::security/update-user-profile-data.asciidoc[] diff --git a/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc b/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc index 15a117b76a036..9e825e2e2ad9b 100644 --- a/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc +++ b/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc @@ -1,10 +1,131 @@ [role="xpack"] [[security-api-activiate-user-profile]] === Activate user profile API + +beta::[] + ++++ Activate user profile ++++ -Creates or updates the user profile on behalf of another user +Creates or updates a user profile on behalf of another user. + +[[security-api-activate-user-profile-request]] +==== {api-request-title} + +`POST /_security/profile/_activate` + +[[security-api-activate-user-profile-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have the `manage_user_profile` cluster privilege. + +[[security-api-activate-user-profile-desc]] +==== {api-description-title} + +The activate user profile API creates or updates a profile document for end +users with information that is extracted from the user's authentication object, +including `username`, `full_name`, `roles`, and the authentication realm. + +When updating a profile document, the API enables the document if it was +disabled. Any updates do not change existing content for either the `access` or +`data` fields. + +This API is intended only for use by applications (such as {kib}) that need to +create or update profiles for end users. 
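A minimal sketch, assuming the low-level Java REST client (used elsewhere in this diff) and the documented example user, of how such an application might drive this endpoint; the `client` handle and credentials are illustrative assumptions, not part of this change:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical sketch: activate a user profile with the "password" grant.
// "client" is an already-built low-level RestClient; the credentials mirror
// the documented example user and are illustrative only.
static Response activateProfile(RestClient client) throws IOException {
    Request activate = new Request("POST", "/_security/profile/_activate");
    activate.setJsonEntity("""
        {
          "grant_type": "password",
          "username": "jacknich",
          "password": "l0ng-r4nd0m-p@ssw0rd"
        }
        """);
    return client.performRequest(activate);
}
```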
+ +IMPORTANT: The calling application must have either an `access_token` or a +combination of `username` and `password` for the user that the profile document +is intended for. + +[role="child_attributes"] +[[security-api-activate-user-profile-request-body]] +==== {api-request-body-title} + +`access_token`:: +(Required*, string) +The user's access token. If you specify the `access_token` grant type, this +parameter is required. It is not valid with other grant types. + +`grant_type`:: +(Required, string) +The type of grant. ++ +.Valid values for `grant_type` +[%collapsible%open] +==== +`access_token`:: +(Required*, string) +In this type of grant, you must supply an access token that was created by the +{es} token service. For more information, see +<> and <>. + +`password`:: +(Required*, string) +In this type of grant, you must supply the `username` and `password` for the +user that you want to activate the user profile for. +==== + +`password`:: +(Required*, string) +The user's password. If you specify the `password` grant type, this parameter is +required. It is not valid with other grant types. + +`username`:: +(Required*, string) +The username that identifies the user. If you specify the `password` grant type, +this parameter is required. It is not valid with other grant types. + +*Indicates that the setting is required in some, but not all situations. + +[[security-api-activate-user-profile-response-body]] +==== {api-response-body-title} + +A successful activate user profile API call returns a JSON structure that contains +the profile unique ID, user information, a timestamp for the operation, and version +control numbers. + +[[security-api-activate-user-profile-example]] +==== {api-examples-title} + +[source,console] +---- +POST /_security/profile/_activate +{ + "grant_type": "password", + "username" : "jacknich", + "password" : "l0ng-r4nd0m-p@ssw0rd" +} +---- +// TEST[setup:jacknich_user] + +The API returns the following response: -coming::[8.2.0] +[source,console-result] +---- +{ + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": {}, + "_doc": { + "_primary_term": 88, + "_seq_no": 66 + } +} +---- +// TESTRESPONSE[s/u_kd2JMqwUQwSCCOxMv7M1vw/$body.uid/] +// TESTRESPONSE[s/1642650651037/$body.last_synchronized/] +// TESTRESPONSE[s/88/$body._doc._primary_term/] +// TESTRESPONSE[s/66/$body._doc._seq_no/] diff --git a/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc b/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc index dc081c19b4f2c..2fbefe439eadd 100644 --- a/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc +++ b/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc @@ -1,10 +1,137 @@ [role="xpack"] [[security-api-get-user-profile]] === Get user profile API + +beta::[] + ++++ Get user profile ++++ -Retrieves a user's profile given the unique profile ID +Retrieves a user's profile using the unique profile ID. + +[[security-api-get-user-profile-request]] +==== {api-request-title} + +`GET /_security/profile/` + +[[security-api-get-user-profile-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have _at least_ the `manage_user_profile` cluster privilege.
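A companion sketch for the retrieval side, again assuming the low-level Java REST client; the `uid` and the `app1.key1` filter are illustrative values taken from the console examples that follow:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical sketch: fetch a profile by uid and request a slice of its
// "data" field. The uid and filter key mirror the documented examples.
static Response getProfile(RestClient client) throws IOException {
    Request get = new Request("GET", "/_security/profile/u_kd2JMqwUQwSCCOxMv7M1vw");
    get.addParameter("data", "app1.key1");
    return client.performRequest(get);
}
```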
+ + +[[security-api-get-user-profile-desc]] +==== {api-description-title} + +The get user profile API returns the user profile document matching a specified +`uid`, which is generated when +<>. + +[[security-api-get-user-profile-path-params]] +==== {api-path-parms-title} + +`uid`:: +(Required, string) A unique identifier for the user profile. + +[[security-api-get-user-profile-query-params]] +==== {api-query-parms-title} + +`data`:: +(Optional, string) Comma-separated list of filters for the `data` field of +the profile document. To return all content, use `data=*`. To return a +subset of content, use `data=` to retrieve the content nested under the +specified ``. Defaults to returning no content. + +[[security-api-get-user-profile-response-body]] +==== {api-response-body-title} + +A successful call returns the JSON representation of the user profile +and its internal versioning numbers. The API returns an empty object +if no profile document is found for the provided `uid`. +The content of the `data` field is not returned by default to avoid deserializing +a potentially large payload. + +[[security-api-get-user-profile-example]] +==== {api-examples-title} + +[source,console] +---- +GET /_security/profile/u_kd2JMqwUQwSCCOxMv7M1vw +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +The API returns the following response for a `uid` matching `u_kd2JMqwUQwSCCOxMv7M1vw`: + +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": {}, <1> + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE +// Besides the uid being random, the response cannot be compared against due to +// the last_synchronized and _doc fields being unpredictable. + +<1> No content is returned in the `data` field by default. + +The following request retrieves a subset of `data` that's nested under the +key `app1`, along with the user's profile: + +[source,console] +---- +GET /_security/profile/u_kd2JMqwUQwSCCOxMv7M1vw?data=app1.key1 +---- +// TEST[skip:uid is random and no way to ensure this uid exists] -coming::[8.2.0] +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": { + "app1": { + "key1": "value1" + } + }, + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE diff --git a/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc b/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc index 40946cd6196ab..f0127339aa7ef 100644 --- a/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc +++ b/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc @@ -1,10 +1,172 @@ [role="xpack"] [[security-api-update-user-profile-data]] -=== Activate user profile data API +=== Update user profile data API + +beta::[] + ++++ Update user profile data ++++ -Update application specific data for the user profile of the given unique ID.
+Updates specific data for the user profile that's associated with the specified +unique ID. + +[[security-api-update-user-profile-data-request]] +==== {api-request-title} + +`POST /_security/profile/_data/` + +[[security-api-update-user-profile-data-prereqs]] +==== {api-prereq-title} + +To use this API, you must have one of the following privileges: + +* The `manage_user_profile` cluster privilege. +* The `update_profile_data` global privilege for the namespaces that are +referenced in the request. + +[[security-api-update-user-profile-data-desc]] +==== {api-description-title} + +The update user profile API updates the `access` and `data` fields of an +existing user profile document with JSON objects. New keys and their values are +added to the profile document, and conflicting keys are replaced by data that's +included in the request. + +For both `access` and `data`, content is namespaced by the top-level fields. +The `update_profile_data` global privilege grants privileges for updating only +the allowed namespaces. + +[[security-api-update-user-profile-data-path-params]] +==== {api-path-parms-title} + +`uid`:: +(Required, string) A unique identifier for the user profile. + +[[security-api-update-user-profile-data-query-params]] +==== {api-query-parms-title} + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] + +[[security-api-update-user-profile-data-request-body]] +==== {api-request-body-title} + +`access`:: +(Required*, object) +Searchable data that you want to associate with the user profile. +This field supports a nested data structure. Within the `access` object, +top-level keys cannot begin with an underscore (`_`) or contain a period (`.`). + +`data`:: +(Required*, object) +Non-searchable data that you want to associate with the user profile. +This field supports a nested data structure. Within the `data` object, top-level +keys cannot begin with an underscore (`_`) or contain a period (`.`). +The `data` object is not searchable, but can be retrieved with the +<>. + +*Indicates that the setting is required in some, but not all situations.
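A minimal sketch of exercising this endpoint from the low-level Java REST client; the `uid` and JSON payload are illustrative values mirroring the console example that follows:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical sketch: update the "access" and "data" namespaces of an
// existing profile. The uid and body mirror the documented example.
static Response updateProfileData(RestClient client) throws IOException {
    Request update = new Request("POST", "/_security/profile/_data/u_kd2JMqwUQwSCCOxMv7M1vw");
    update.setJsonEntity("""
        {
          "access": { "app1": { "tag": "prod" } },
          "data":   { "app1": { "theme": "default" } }
        }
        """);
    return client.performRequest(update);
}
```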
+ +[[security-api-update-user-profile-data-response-body]] +==== {api-response-body-title} + +A successful update user profile data API call returns a JSON structure +indicating that the request is acknowledged: + +[source,js] +---- +{ + "acknowledged": true +} +---- +// NOTCONSOLE + +[[security-api-update-user-profile-data-example]] +==== {api-examples-title} + +The following request updates a profile document for a `uid` matching +`u_kd2JMqwUQwSCCOxMv7M1vw`: + +[source,console] +---- +POST /_security/profile/_data/u_kd2JMqwUQwSCCOxMv7M1vw +{ + "access": { + "app1": { + "tag": "prod" + } + }, + "data": { + "app1": { + "theme": "default" + } + } +} +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +You can update the profile data to replace some keys and add new keys: + +[source,console] +---- +POST /_security/profile/_data/u_kd2JMqwUQwSCCOxMv7M1vw +{ + "access": { + "app1": { + "tag": "dev" + } + }, + "data": { + "app1": { + "font": "large" + } + } +} +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +If you then retrieve the user profile, the consolidated profile data is returned: -coming::[8.2.0] +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": { + "app1": { + "tag": "dev" + } + }, + "data": { + "app1": { + "theme": "default", + "font": "large" + } + }, + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index e4f6bc23c1543..ba19563c75f81 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -20,7 +20,8 @@ required in {kib} to activate the SAML authentication provider. NOTE: The SAML support in {kib} is designed on the expectation that it will be the primary (or sole) authentication method for users of that {kib} instance. Once you enable SAML authentication in {kib} it will affect all users who try -to login. The <> section provides more detail about how this works. +to log in. The <> section provides more detail about how +this works. [[saml-guide-idp]] === The identity provider diff --git a/x-pack/docs/en/security/authentication/saml-realm.asciidoc b/x-pack/docs/en/security/authentication/saml-realm.asciidoc index cd91505f63d32..dba414d192081 100644 --- a/x-pack/docs/en/security/authentication/saml-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-realm.asciidoc @@ -17,4 +17,4 @@ chain. In order to simplify the process of configuring SAML authentication within the Elastic Stack, there is a step-by-step guide to -<>. +<>. diff --git a/x-pack/docs/en/security/authentication/service-accounts.asciidoc b/x-pack/docs/en/security/authentication/service-accounts.asciidoc index 141ad57f8067f..ce64b539ab9b6 100644 --- a/x-pack/docs/en/security/authentication/service-accounts.asciidoc +++ b/x-pack/docs/en/security/authentication/service-accounts.asciidoc @@ -51,6 +51,9 @@ communicate with {es}. `elastic/kibana`:: The service account used by {kib} to communicate with {es}. +`elastic/enterprise-search-server`:: The service account used by Enterprise Search +to communicate with {es}.
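A minimal sketch, assuming the create-service-account-token API and the low-level Java REST client, of minting a credential for the new account; the token name `server-token-1` is an arbitrary illustration:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical sketch: create a service token for the new Enterprise Search
// service account. The token name is illustrative only.
static Response createEnterpriseSearchToken(RestClient client) throws IOException {
    Request create = new Request(
        "POST",
        "/_security/service/elastic/enterprise-search-server/credential/token/server-token-1"
    );
    return client.performRequest(create);
}
```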
+ + // tag::service-accounts-usage[] IMPORTANT: Do not attempt to use service accounts for authenticating individual users. Service accounts can only be authenticated with service tokens, which are diff --git a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc b/x-pack/docs/en/security/authorization/built-in-roles.asciidoc index 4f589d50bafb4..ad317e276d960 100644 --- a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/built-in-roles.asciidoc @@ -84,8 +84,11 @@ This role does not have access to editing tools in {kib}. [[built-in-roles-kibana-system]] `kibana_system` :: Grants access necessary for the {kib} system user to read from and write to the {kib} indices, manage index templates and tokens, and check the availability of -the {es} cluster. This role grants read access to the `.monitoring-*` indices -and read and write access to the `.reporting-*` indices. For more information, +the {es} cluster. It also permits +<>, +as well as updating user profile data for the `kibana-*` namespace. +This role grants read access to the `.monitoring-*` indices and read and write +access to the `.reporting-*` indices. For more information, see {kibana-ref}/using-kibana-with-security.html[Configuring Security in {kib}]. + NOTE: This role should not be assigned to users as the granted permissions may @@ -172,7 +175,7 @@ Grants full access to cluster management and data indices. This role also grants direct read-only access to restricted indices like `.security`. A user with the `superuser` role can <> any other user in the system. + -On {ecloud}, all standard users, including those with the `superuser` role are +On {ecloud}, all standard users, including those with the `superuser` role, are restricted from performing <> actions. + IMPORTANT: This role can manage security and create roles with unlimited privileges. diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index 19ffea585bc44..49f068ad7bfd1 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -101,25 +101,32 @@ multiple data streams, indices, and aliases. [[roles-global-priv]] ==== Global Privileges -The following describes the structure of a global privileges entry: +The following describes the structure of the global privileges entry: [source,js] ------- { "application": { "manage": { <1> - "applications": [ ... ] <2> + "applications": [ ... ] <2> + } + }, + "profile": { + "write": { <3> + "applications": [ ... ] <4> + } + } } ------- // NOTCONSOLE -<1> The only supported global privilege is the ability to manage application - privileges +<1> The privilege for the ability to manage application privileges <2> The list of application names that may be managed. This list supports wildcards (e.g. `"myapp-*"`) and regular expressions (e.g. `"/app[0-9]*/"`) +<3> The privilege for the ability to write the `access` and `data` of any user profile +<4> The list of names, wildcards and regular expressions to which the write +privilege is restricted [[roles-application-priv]] ==== Application Privileges @@ -195,7 +202,7 @@ see <>. === Role management UI You can manage users and roles easily in {kib}. To -manage roles, log in to {kib} and go to *Management / Security / Roles*. +manage roles, log in to {kib} and go to *Management / Security / Roles*.
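The new `profile.write` global privilege above can be granted through the role APIs; a minimal sketch, assuming the low-level Java REST client, an illustrative role name, and an assumed `myapp-*` namespace:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical sketch: a role granting the "profile.write" global privilege,
// restricted to an illustrative "myapp-*" namespace.
static Response createProfileWriterRole(RestClient client) throws IOException {
    Request put = new Request("PUT", "/_security/role/profile_writer");
    put.setJsonEntity("""
        {
          "global": {
            "profile": {
              "write": { "applications": [ "myapp-*" ] }
            }
          }
        }
        """);
    return client.performRequest(put);
}
```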
[discrete] [[roles-management-api]] @@ -203,8 +210,8 @@ manage roles, log in to {kib} and go to *Management / Security / Roles*. The _Role Management APIs_ enable you to add, update, remove and retrieve roles dynamically. When you use the APIs to manage roles in the `native` realm, the -roles are stored in an internal {es} index. For more information and examples, -see <>. +roles are stored in an internal {es} index. For more information and examples, +see <>. [discrete] [[roles-management-file]] diff --git a/x-pack/docs/en/security/configuring-stack-security.asciidoc b/x-pack/docs/en/security/configuring-stack-security.asciidoc index 4fb41a7f4d8a4..8c1623a079e70 100644 --- a/x-pack/docs/en/security/configuring-stack-security.asciidoc +++ b/x-pack/docs/en/security/configuring-stack-security.asciidoc @@ -1,8 +1,6 @@ [[configuring-stack-security]] == Start the Elastic Stack with security enabled -beta::[This functionality is in beta and is subject to change. The design and code is less mature than official GA features and is being provided as-is with no warranties. Beta features are not subject to the support SLA of official GA features.] - When you start {es} for the first time, the following security configuration occurs automatically: diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/x-pack/docs/en/watcher/transform/search.asciidoc index 7e62e30566baa..9ed60ceda2df3 100644 --- a/x-pack/docs/en/watcher/transform/search.asciidoc +++ b/x-pack/docs/en/watcher/transform/search.asciidoc @@ -71,15 +71,15 @@ The following table lists all available settings for the search | `request.indices_options.expand_wildcards` | no | `open` | Determines how to expand indices wildcards. An array consisting of a combination of `open`, `closed`, and `hidden`. Alternatively a value of `none` or `all`. - (see <>) + (see <>) | `request.indices_options.ignore_unavailable` | no | `true` | A boolean value that determines whether the search should leniently ignore unavailable indices - (see <>) + (see <>) | `request.indices_options.allow_no_indices` | no | `true` | A boolean value that determines whether the search should leniently return no results when no indices - are resolved (see <>) + are resolved (see <>) | `request.template` | no | - | The body of the search template. See <> for more information. 
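The analytics hunks that follow add `getMinimalSupportedVersion()` overrides to several pipeline aggregation builders; a minimal sketch of the pattern, shown in isolation and assuming a builder that first shipped in 7.4.0 (as the cumulative cardinality hunk does):

```java
import org.elasticsearch.Version;

// Sketch of the pattern: each builder reports the oldest node version that
// can deserialize it, so mixed-version clusters can reject the aggregation
// cleanly instead of sending it to a node that cannot read it. The version
// returned is whichever release first shipped the aggregation.
@Override
public Version getMinimalSupportedVersion() {
    return Version.V_7_4_0;
}
```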
diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java index 8804818f91695..0a4a98f0c365f 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.analytics.cumulativecardinality; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -128,4 +129,9 @@ public String getWriteableName() { protected boolean overrideBucketsPath() { return true; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_4_0; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java index bd86230ed4182..a0135c045bef4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.analytics.movingPercentiles; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -129,4 +130,9 @@ public String getWriteableName() { protected boolean overrideBucketsPath() { return true; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java index f16b19f46f045..f2cf85646f7be 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.analytics.normalize; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -16,6 +17,12 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Mean; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Percent; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOne; 
+import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOneHundred; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Softmax; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.ZScore; import java.io.IOException; import java.util.List; @@ -27,12 +34,6 @@ import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Mean; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Percent; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOne; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOneHundred; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Softmax; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.ZScore; public class NormalizePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { public static final String NAME = "normalize"; @@ -152,4 +153,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java index 5617611219aa4..77b034c911036 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -84,6 +85,11 @@ public InternalRate reduce(List aggregations, AggregationRe return new InternalRate(name, kahanSummation.value(), firstDivisor, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalRate(name, samplingContext.inverseScale(sum), divisor, format, getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), value()); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java index 7872c362fcc0b..5e28e700399e7 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java @@ -74,6 +74,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new RateAggregationBuilder(this, 
factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java index d04a42d7452f7..3425eb206892e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -61,6 +62,11 @@ public InternalTTest reduce(List aggregations, AggregationR return new InternalTTest(name, reduced, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { double value = state.getValue(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java index 606fdbacfbb55..36b1218411f0b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java @@ -110,6 +110,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new TTestAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public BucketCardinality bucketCardinality() { return BucketCardinality.NONE; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java index 8bc1cd7ffa0db..b2e0099bf32c4 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -24,6 +25,7 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.closeTo; import static org.mockito.Mockito.mock; public class InternalRateTests extends InternalAggregationTestCase { @@ -53,6 +55,16 @@ protected BuilderAndToReduce randomResultsToReduce(String name, in return new BuilderAndToReduce<>(mock(AggregationBuilder.class), inputs); } + @Override + protected boolean supportsSampling() { + 
return true; + } + + @Override + protected void assertSampled(InternalRate sampled, InternalRate reduced, SamplingContext samplingContext) { + assertThat(sampled.getValue(), closeTo(samplingContext.inverseScale(reduced.getValue()), 1e-10)); + } + @Override protected void assertReduced(InternalRate reduced, List inputs) { double expected = inputs.stream().mapToDouble(a -> a.sum).sum() / reduced.divisor; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java index d9c9bfa80ac6a..7a8bb0cc6471f 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -77,6 +78,16 @@ protected void assertReduced(InternalTTest reduced, List inputs) assertEquals(expected.getValue(), reduced.getValue(), 0.00001); } + @Override + protected void assertSampled(InternalTTest sampled, InternalTTest reduced, SamplingContext samplingContext) { + assertEquals(sampled.getValue(), reduced.getValue(), 1e-12); + } + + @Override + protected boolean supportsSampling() { + return true; + } + @Override protected void assertFromXContent(InternalTTest min, ParsedAggregation parsedAggregation) { // There is no ParsedTTest yet so we cannot test it here diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index b7d6240be0dc2..1c9e40f4cfda1 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -43,6 +43,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; public class TransportSubmitAsyncSearchAction extends HandledTransportAction { + private final ClusterService clusterService; private final NodeClient nodeClient; private final BiFunction, SearchRequest, AggregationReduceContext> requestToAggReduceContextBuilder; private final TransportSearchAction searchAction; @@ -62,6 +63,7 @@ public TransportSubmitAsyncSearchAction( BigArrays bigArrays ) { super(SubmitAsyncSearchAction.NAME, transportService, actionFilters, SubmitAsyncSearchRequest::new); + this.clusterService = clusterService; this.nodeClient = nodeClient; this.requestToAggReduceContextBuilder = (task, request) -> searchService.aggReduceContextBuilder(task, request).forFinalReduction(); this.searchAction = searchAction; @@ -144,7 +146,10 @@ public void onFailure(Exception exc) { private SearchRequest createSearchRequest(SubmitAsyncSearchRequest request, Task submitTask, TimeValue keepAlive) { String docID = UUIDs.randomBase64UUID(); - Map originHeaders = 
ClientHelper.filterSecurityHeaders(nodeClient.threadPool().getThreadContext().getHeaders()); + Map originHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + nodeClient.threadPool().getThreadContext(), + clusterService.state() + ); SearchRequest searchRequest = new SearchRequest(request.getSearchRequest()) { @Override public AsyncSearchTask createTask(long id, String type, String action, TaskId parentTaskId, Map taskHeaders) { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java index dd534dfd40915..6b1035c9b7b23 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -import java.util.stream.StreamSupport; /** * This decider looks at all indices and ensures a minimum capacity is available if any indices are in the frozen ILM phase, since that @@ -45,7 +44,9 @@ public String name() { @Override public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDeciderContext context) { - List indicesNeedingFrozen = StreamSupport.stream(context.state().metadata().spliterator(), false) + List indicesNeedingFrozen = context.state() + .metadata() + .stream() .filter(this::needsTier) .map(imd -> imd.getIndex().getName()) .limit(10) diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java index 9d405593bb4bb..18044de7b5954 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.stream.StreamSupport; /** * This decider enforces that on a 64GB memory node (31GB heap) we can max have 2000 shards. 
We arrive at 2000 because our current limit is @@ -58,7 +57,7 @@ public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDecider } static int countFrozenShards(Metadata metadata) { - return StreamSupport.stream(metadata.spliterator(), false) + return metadata.stream() .filter(imd -> FrozenUtils.isFrozenIndex(imd.getSettings())) .mapToInt(IndexMetadata::getTotalNumberOfShards) .sum(); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java index dbaa812c5eafc..5c96e9029c530 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.stream.StreamSupport; public class FrozenStorageDeciderService implements AutoscalingDeciderService { public static final String NAME = "frozen_storage"; @@ -42,7 +41,7 @@ public String name() { @Override public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDeciderContext context) { Metadata metadata = context.state().metadata(); - long dataSetSize = StreamSupport.stream(metadata.spliterator(), false) + long dataSetSize = metadata.stream() .filter(imd -> FrozenUtils.isFrozenIndex(imd.getSettings())) .mapToLong(imd -> estimateSize(imd, context.info())) .sum(); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 9a10b5a98952c..1989fece08243 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -558,7 +558,7 @@ private SingleForecast forecast(Metadata metadata, IndexAbstraction.DataStream s for (int i = 0; i < numberNewIndices; ++i) { final String uuid = UUIDs.randomBase64UUID(); final Tuple rolledDataStreamInfo = dataStream.unsafeNextWriteIndexAndGeneration(state.metadata()); - dataStream = dataStream.unsafeRollover(new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2()); + dataStream = dataStream.unsafeRollover(new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2(), null); // this unintentionally copies the in-sync allocation ids too. This has the fortunate effect of these indices // not being regarded new by the disk threshold decider, thereby respecting the low watermark threshold even for primaries. 
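The autoscaling hunks above swap manual spliterator plumbing for a `stream()` accessor that this change assumes now exists on `Metadata`; a minimal before/after sketch, assuming `Metadata` iterates `IndexMetadata`:

```java
import java.util.stream.StreamSupport;

import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;

// Sketch of the refactor: both expressions walk the same index metadata,
// but metadata.stream() avoids the StreamSupport boilerplate.
static long totalShards(Metadata metadata) {
    long before = StreamSupport.stream(metadata.spliterator(), false)
        .mapToInt(IndexMetadata::getTotalNumberOfShards)
        .sum();
    long after = metadata.stream()   // equivalent, simpler
        .mapToInt(IndexMetadata::getTotalNumberOfShards)
        .sum();
    assert before == after;
    return after;
}
```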
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index b43ac4bd9a6c0..e184c9ffb2567 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -1,7 +1,8 @@ +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' @@ -76,16 +77,19 @@ tasks.named("yamlRestTest").configure { } tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'unsigned_long/50_script_values/Scripted sort values', - 'unsigned_long/50_script_values/script_score query', - 'unsigned_long/50_script_values/Script query', - 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', - ].join(',') + systemProperty 'tests.rest.blacklist', [ + 'unsigned_long/50_script_values/Scripted sort values', + 'unsigned_long/50_script_values/script_score query', + 'unsigned_long/50_script_values/Script query', + 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', + ].join(',') } -tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - task.skipTest("vectors/10_dense_vector_basic/Deprecated function signature", "to support it, it would require to almost revert back the #48725 and complicate the code" ) +tasks.named("yamlRestTestV7CompatTransform").configure { task -> + task.skipTest( + "vectors/10_dense_vector_basic/Deprecated function signature", + "to support it, it would require to almost revert back the #48725 and complicate the code" + ) task.skipTest("vectors/30_sparse_vector_basic/Cosine Similarity", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Deprecated function signature", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Dot Product", "not supported for compatibility") @@ -93,36 +97,55 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.skipTest("vectors/35_sparse_vector_l1l2/L2 norm", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Dimensions can be sorted differently", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Documents missing a vector field", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", "not supported for compatibility") + task.skipTest( + "vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", + "not supported for compatibility" + ) task.skipTest("vectors/40_sparse_vector_special_cases/Sparse vectors should error with dense vector functions", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Vectors of different dimensions and data types", "not supported for compatibility") task.skipTest("vectors/50_vector_stats/Usage stats on vector fields", "not supported for compatibility") - task.skipTest("roles/30_prohibited_role_query/Test use prohibited query inside role query", "put role request with a term lookup (deprecated) and type. 
Requires validation in REST layer") + task.skipTest( + "roles/30_prohibited_role_query/Test use prohibited query inside role query", + "put role request with a term lookup (deprecated) and type. Requires validation in REST layer" + ) task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to different job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", "behaviour change #44752 - not allowing to update datafeed job_id") + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to different job", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) task.skipTest("rollup/delete_job/Test basic delete_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete job twice", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete running job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/get_jobs/Test basic get_jobs", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/put_job/Test basic put_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/start_job/Test start job twice", "rollup was an experimental feature, also see #41227") - task.skipTest("ml/trained_model_cat_apis/Test cat trained models", "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. (the test would be very hard to transform too)") + task.skipTest( + "ml/trained_model_cat_apis/Test cat trained models", + "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. 
(the test would be very hard to transform too)" + ) task.skipTest("indices.freeze/30_usage/Usage stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/20_stats/Translog stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Basic", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Test index options", "#70192 -- the freeze index API is removed from 8.0") + task.skipTest("sql/sql/Paging through results", "scrolling through search hit queries no longer produces empty last page in 8.2") + task.skipTest("service_accounts/10_basic/Test get service accounts", "new service accounts are added") task.replaceValueInMatch("_type", "_doc") task.addAllowedWarningRegex("\\[types removal\\].*") task.addAllowedWarningRegexForTest("Including \\[accept_enterprise\\] in get license.*", "Installing enterprise license") task.addAllowedWarningRegex("bucket_span .* is not an integral .* of the number of seconds in 1d.* This is now deprecated.*") - task.replaceValueTextByKeyValue("catch", + task.replaceValueTextByKeyValue( + "catch", 'bad_request', '/It is no longer possible to freeze indices, but existing frozen indices can still be unfrozen/', - "Cannot freeze write index for data stream") + "Cannot freeze write index for data stream" + ) } @@ -151,11 +174,9 @@ testClusters.configureEach { extraConfigFile nodeCert.name, nodeCert extraConfigFile serviceTokens.name, serviceTokens - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - systemProperty 'es.user_profile_feature_flag_enabled', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") } tasks.register('enforceApiSpecsConvention').configure { diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index bc0e0d80edc7e..31cdf04f6dc94 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask @@ -66,9 +67,7 @@ testClusters.matching {it.name == "follow-cluster" }.configureEach { testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index 9448a8898b4e6..c9af16f1d4542 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -393,11 +393,11 @@ User getUser(final Client remoteClient) { return securityContext.getUser(); } - public static Client wrapClient(Client client, Map headers) { + public static Client wrapClient(Client client, 
Map headers, ClusterState clusterState) { if (headers.isEmpty()) { return client; } else { - Map filteredHeaders = ClientHelper.filterSecurityHeaders(headers); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterState); if (filteredHeaders.isEmpty()) { return client; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 29a267f6fd78a..7c85c7afb72f0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -304,7 +304,7 @@ void createAndFollow( Runnable successHandler, Consumer failureHandler ) { - Client followerClient = CcrLicenseChecker.wrapClient(client, headers); + Client followerClient = CcrLicenseChecker.wrapClient(client, headers, clusterService.state()); followerClient.execute( PutFollowAction.INSTANCE, request, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 2fcc177798b0e..fe977216b4c0c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -150,7 +150,7 @@ protected AllocatedPersistentTask createTask( Map headers ) { ShardFollowTask params = taskInProgress.getParams(); - Client followerClient = wrapClient(client, params.getHeaders()); + Client followerClient = wrapClient(client, params.getHeaders(), clusterService.state()); BiConsumer scheduler = (delay, command) -> threadPool.scheduleUnlessShuttingDown( delay, Ccr.CCR_THREAD_POOL_NAME, @@ -562,7 +562,8 @@ private String getLeaderShardHistoryUUID(ShardFollowTask params) { } private Client remoteClient(ShardFollowTask params) { - return wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders()); + // TODO: do we need minNodeVersion here since it is for remote cluster + return wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders(), clusterService.state()); } interface FollowerStatsInfoHandler { @@ -571,7 +572,7 @@ interface FollowerStatsInfoHandler { @Override protected void nodeOperation(final AllocatedPersistentTask task, final ShardFollowTask params, final PersistentTaskState state) { - Client followerClient = wrapClient(client, params.getHeaders()); + Client followerClient = wrapClient(client, params.getHeaders(), clusterService.state()); ShardFollowNodeTask shardFollowNodeTask = (ShardFollowNodeTask) task; logger.info("{} Starting to track leader shard {}", params.getFollowShardId(), params.getLeaderShardId()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index b4fbe29d2a45e..fa636eeb5d5c8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -95,7 +95,10 @@ protected void masterOperation( return; } final Client remoteClient = 
client.getRemoteClusterClient(request.getRemoteCluster()); - final Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + final Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); Consumer consumer = remoteClusterState -> { String[] indices = request.getLeaderIndexPatterns().toArray(new String[0]); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 1661585b5062f..d37dd4b237873 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -197,7 +197,11 @@ private void createFollowerIndex( .masterNodeTimeout(request.masterNodeTimeout()) .indexSettings(overrideSettings); - final Client clientWithHeaders = CcrLicenseChecker.wrapClient(this.client, threadPool.getThreadContext().getHeaders()); + final Client clientWithHeaders = CcrLicenseChecker.wrapClient( + this.client, + threadPool.getThreadContext().getHeaders(), + clusterService.state() + ); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new AbstractRunnable() { @Override @@ -268,7 +272,8 @@ public void onFailure(Exception e) { assert restoreInfo.failedShards() > 0 : "Should have failed shards"; delegatedListener.onResponse(new PutFollowAction.Response(true, false, false)); } - }) + }), + threadPool.getThreadContext() ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index f1367a29b31fc..bd89f833a4ad9 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -179,7 +179,10 @@ void start( validate(request, leaderIndexMetadata, followIndexMetadata, leaderIndexHistoryUUIDs, mapperService); final int numShards = followIndexMetadata.getNumberOfShards(); final ResponseHandler handler = new ResponseHandler(numShards, listener); - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); for (int shardId = 0; shardId < numShards; shardId++) { String taskId = followIndexMetadata.getIndexUUID() + "-" + shardId; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index 1f775b97ee4d0..e76154ee5f470 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import 
org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -178,10 +179,16 @@ private void removeRetentionLeaseForShard( ) { logger.trace("{} removing retention lease [{}] while unfollowing leader index", followerShardId, retentionLeaseId); final ThreadContext threadContext = threadPool.getThreadContext(); + // We're about to stash the thread context for this retention lease removal. The listener will be completed while the + // context is stashed. The context needs to be restored in the listener when it is completing or else it is simply wiped. + final ActionListener preservedListener = new ContextPreservingActionListener<>( + threadContext.newRestorableContext(true), + listener + ); try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { // we have to execute under the system context so that if security is enabled the removal is authorized threadContext.markAsSystemContext(); - CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, listener); + CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, preservedListener); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java index eb8038755d782..d116106b044ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java @@ -114,7 +114,7 @@ public static class PutCcrRestoreSessionResponse extends ActionResponse { PutCcrRestoreSessionResponse(StreamInput in) throws IOException { super(in); node = new DiscoveryNode(in); - storeFileMetadata = new Store.MetadataSnapshot(in); + storeFileMetadata = Store.MetadataSnapshot.readFrom(in); mappingVersion = in.readVLong(); } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 768bbb936ee0f..ce2da68cfefb4 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -77,7 +77,7 @@ tasks.named("processResources").configure { inputs.properties(expansions) filter("tokens" : expansions, ReplaceTokens.class) } - String licenseKey = providers.systemProperty("license.key").forUseAtConfigurationTime().getOrNull() + String licenseKey = providers.systemProperty("license.key").getOrNull() if (licenseKey != null) { println "Using provided license key from ${licenseKey}" } else if (BuildParams.isSnapshotBuild()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java index 66be6d081dbda..127376c83bc43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java @@ -140,7 +140,7 @@ public void snapshotShard(SnapshotShardContext context) { && mapperService.documentMapper().sourceMapper().isComplete() == false) { context.onFailure( new IllegalStateException( - "Can't snapshot _source only on an index that has incomplete source ie. 
has _source disabled " + "or filters the source" + "Can't snapshot _source only on an index that has incomplete source ie. has _source disabled or filters the source" ) ); return; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 08e68ed94097f..d1a1791998bcf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; @@ -14,12 +15,19 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; +import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -77,6 +85,89 @@ public static Map filterSecurityHeaders(Map head } } + /** + * In addition to {@link #filterSecurityHeaders}, also check the version of Authentication objects + * and rewrite them using minNodeVersion so that they are safe to be persisted as index data + * and loaded by all nodes in the cluster. + */ + public static Map getPersistableSafeSecurityHeaders(ThreadContext threadContext, ClusterState clusterState) { + return maybeRewriteAuthenticationHeadersForVersion( + filterSecurityHeaders(threadContext.getHeaders()), + key -> new AuthenticationContextSerializer(key).readFromContext(threadContext), + clusterState.nodes().getMinNodeVersion() + ); + } + + /** + * Similar to {@link #getPersistableSafeSecurityHeaders(ThreadContext, ClusterState)}, + * but works on a Map of headers instead of ThreadContext. + */ + public static Map getPersistableSafeSecurityHeaders(Map headers, ClusterState clusterState) { + final CheckedFunction authenticationReader = key -> { + final String authHeader = headers.get(key); + return authHeader == null ? 
null : AuthenticationContextSerializer.decode(authHeader); + }; + return maybeRewriteAuthenticationHeadersForVersion( + filterSecurityHeaders(headers), + authenticationReader, + clusterState.nodes().getMinNodeVersion() + ); + } + + private static Map maybeRewriteAuthenticationHeadersForVersion( + Map filteredHeaders, + CheckedFunction authenticationReader, + Version minNodeVersion + ) { + Map newHeaders = null; + + final String authHeader = maybeRewriteSingleAuthenticationHeaderForVersion( + authenticationReader, + AuthenticationField.AUTHENTICATION_KEY, + minNodeVersion + ); + if (authHeader != null) { + newHeaders = new HashMap<>(); + newHeaders.put(AuthenticationField.AUTHENTICATION_KEY, authHeader); + } + + final String secondaryHeader = maybeRewriteSingleAuthenticationHeaderForVersion( + authenticationReader, + SecondaryAuthentication.THREAD_CTX_KEY, + minNodeVersion + ); + if (secondaryHeader != null) { + if (newHeaders == null) { + newHeaders = new HashMap<>(); + } + newHeaders.put(SecondaryAuthentication.THREAD_CTX_KEY, secondaryHeader); + } + + if (newHeaders != null) { + final HashMap mutableHeaders = new HashMap<>(filteredHeaders); + mutableHeaders.putAll(newHeaders); + return Map.copyOf(mutableHeaders); + } else { + return filteredHeaders; + } + } + + private static String maybeRewriteSingleAuthenticationHeaderForVersion( + CheckedFunction authenticationReader, + String authenticationHeaderKey, + Version minNodeVersion + ) { + try { + final Authentication authentication = authenticationReader.apply(authenticationHeaderKey); + if (authentication != null && authentication.getVersion().after(minNodeVersion)) { + return authentication.maybeRewriteForOlderVersion(minNodeVersion).encode(); + } + } catch (IOException e) { + throw new UncheckedIOException("failed to read authentication with key [" + authenticationHeaderKey + "]", e); + } + return null; + } + /** * . * @deprecated use ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME @@ -167,6 +258,7 @@ public static T executeWithHeaders( Client client, Supplier supplier ) { + // No need to rewrite authentication header because it will be handled by Security Interceptor Map filteredHeaders = filterSecurityHeaders(headers); // no security headers, we will have to use the xpack internal user for @@ -206,6 +298,7 @@ public static v Request request, ActionListener listener ) { + // No need to rewrite authentication header because it will be handled by Security Interceptor final Map filteredHeaders = filterSecurityHeaders(headers); final ThreadContext threadContext = client.threadPool().getThreadContext(); // No headers (e.g. 
security not installed/in use) so execute as origin diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 8d78275266a86..3bdb76c15ac35 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.aggregatemetric.AggregateMetricFeatureSetUsage; import org.elasticsearch.xpack.core.analytics.AnalyticsFeatureSetUsage; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.datastreams.DataStreamFeatureSetUsage; @@ -444,6 +445,11 @@ public List getNamedWriteables() { ConfigurableClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, ConfigurableClusterPrivileges.ManageApplicationPrivileges::createFrom ), + new NamedWriteableRegistry.Entry( + ConfigurableClusterPrivilege.class, + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.WRITEABLE_NAME, + ConfigurableClusterPrivileges.WriteProfileDataPrivileges::createFrom + ), // security : role-mappings new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AllExpression.NAME, AllExpression::new), new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AnyExpression.NAME, AnyExpression::new), @@ -552,7 +558,9 @@ public List getNamedWriteables() { // Data Streams new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_STREAMS, DataStreamFeatureSetUsage::new), // Data Tiers - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new) + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new), + // Archive + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.ARCHIVE, ArchiveFeatureSetUsage::new) ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 59343705b9098..dbc100e62ac1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -71,6 +71,8 @@ public final class XPackField { public static final String AGGREGATE_METRIC = "aggregate_metric"; /** Name constant for the operator privileges feature. */ public static final String OPERATOR_PRIVILEGES = "operator_privileges"; + /** Name constant for the archive feature. 
*/ + public static final String ARCHIVE = "archive"; private XPackField() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index 959ffc448f548..6a9d00e62e975 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -20,15 +21,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.core.common.IteratingActionListener; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; -import java.util.function.BiConsumer; public class TransportXPackUsageAction extends TransportMasterNodeAction { @@ -66,32 +61,28 @@ protected List usageActions() { @Override protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, ActionListener listener) { - final ActionListener> usageActionListener = listener.delegateFailure( - (l, usages) -> l.onResponse(new XPackUsageResponse(usages)) - ); - final AtomicReferenceArray featureSetUsages = new AtomicReferenceArray<>(usageActions.size()); - final AtomicInteger position = new AtomicInteger(0); - final BiConsumer>> consumer = (featureUsageAction, iteratingListener) -> { - // Since we're executing the actions locally we should create a new request - // to avoid mutating the original request and setting the wrong parent task, - // since it is possible that the parent task gets cancelled and new child tasks are banned. 
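
// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): the essence of the new
// ClientHelper.getPersistableSafeSecurityHeaders(...) flow introduced above. An
// Authentication header created on a newer node may use a serialization format that
// older nodes cannot read, so before it is persisted (e.g. inside a follower task)
// it is rewritten against the oldest node version in the cluster. The method name
// and parameters below are illustrative; the individual API calls all appear in
// this PR.
//
// import java.io.IOException;
// import org.elasticsearch.Version;
// import org.elasticsearch.cluster.ClusterState;
// import org.elasticsearch.common.util.concurrent.ThreadContext;
// import org.elasticsearch.xpack.core.security.authc.Authentication;
// import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer;
static String persistableAuthHeader(ThreadContext threadContext, ClusterState clusterState, String headerKey) throws IOException {
    final Authentication authentication = new AuthenticationContextSerializer(headerKey).readFromContext(threadContext);
    final Version minNodeVersion = clusterState.nodes().getMinNodeVersion();
    if (authentication != null && authentication.getVersion().after(minNodeVersion)) {
        // Downgrade the header so the oldest node in the cluster can still deserialize it.
        return authentication.maybeRewriteForOlderVersion(minNodeVersion).encode();
    }
    return null; // null here means the existing header is already safe to persist
}
// ---------------------------------------------------------------------------------
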
- final XPackUsageRequest childRequest = new XPackUsageRequest(); - childRequest.setParentTask(request.getParentTask()); - client.executeLocally(featureUsageAction, childRequest, iteratingListener.delegateFailure((l, usageResponse) -> { - featureSetUsages.set(position.getAndIncrement(), usageResponse.getUsage()); - // the value sent back doesn't matter since our predicate keeps iterating - l.onResponse(Collections.emptyList()); - })); - }; - IteratingActionListener, XPackUsageFeatureAction> iteratingActionListener = - new IteratingActionListener<>(usageActionListener, consumer, usageActions, threadPool.getThreadContext(), (ignore) -> { - final List usageList = new ArrayList<>(featureSetUsages.length()); - for (int i = 0; i < featureSetUsages.length(); i++) { - usageList.add(featureSetUsages.get(i)); + new ActionRunnable<>(listener) { + final List responses = new ArrayList<>(usageActions.size()); + + @Override + protected void doRun() { + if (responses.size() < usageActions().size()) { + final var childRequest = new XPackUsageRequest(); + childRequest.setParentTask(request.getParentTask()); + client.executeLocally( + usageActions.get(responses.size()), + childRequest, + listener.delegateFailure((delegate, response) -> { + responses.add(response.getUsage()); + run(); // XPackUsageFeatureTransportAction always forks to MANAGEMENT so no risk of stack overflow here + }) + ); + } else { + assert responses.size() == usageActions.size() : responses.size() + " vs " + usageActions.size(); + listener.onResponse(new XPackUsageResponse(responses)); } - return usageList; - }, (ignore) -> true); - iteratingActionListener.run(); + } + }.run(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java index c6c941ef3092d..83e835d4bb6dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java @@ -47,6 +47,7 @@ public class XPackInfoFeatureAction extends ActionType public static final XPackInfoFeatureAction DATA_STREAMS = new XPackInfoFeatureAction(XPackField.DATA_STREAMS); public static final XPackInfoFeatureAction DATA_TIERS = new XPackInfoFeatureAction(XPackField.DATA_TIERS); public static final XPackInfoFeatureAction AGGREGATE_METRIC = new XPackInfoFeatureAction(XPackField.AGGREGATE_METRIC); + public static final XPackInfoFeatureAction ARCHIVE = new XPackInfoFeatureAction(XPackField.ARCHIVE); public static final List ALL; static { @@ -74,7 +75,8 @@ public class XPackInfoFeatureAction extends ActionType DATA_STREAMS, SEARCHABLE_SNAPSHOTS, DATA_TIERS, - AGGREGATE_METRIC + AGGREGATE_METRIC, + ARCHIVE ) ); ALL = Collections.unmodifiableList(actions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index cd310064ffa0f..bfbac109012e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -44,6 +44,7 @@ public class XPackUsageFeatureAction extends ActionType ALL = List.of( AGGREGATE_METRIC, @@ -66,7 +67,8 @@ public class XPackUsageFeatureAction extends ActionType { + + public 
static final MlMemoryAction INSTANCE = new MlMemoryAction(); + public static final String NAME = "cluster:monitor/xpack/ml/memory/stats/get"; + + static final String MEM = "mem"; + static final String TOTAL = "total"; + static final String TOTAL_IN_BYTES = "total_in_bytes"; + static final String ADJUSTED_TOTAL = "adjusted_total"; + static final String ADJUSTED_TOTAL_IN_BYTES = "adjusted_total_in_bytes"; + static final String ML = "ml"; + static final String MAX = "max"; + static final String MAX_IN_BYTES = "max_in_bytes"; + static final String NATIVE_CODE_OVERHEAD = "native_code_overhead"; + static final String NATIVE_CODE_OVERHEAD_IN_BYTES = "native_code_overhead_in_bytes"; + static final String ANOMALY_DETECTORS = "anomaly_detectors"; + static final String ANOMALY_DETECTORS_IN_BYTES = "anomaly_detectors_in_bytes"; + static final String DATA_FRAME_ANALYTICS = "data_frame_analytics"; + static final String DATA_FRAME_ANALYTICS_IN_BYTES = "data_frame_analytics_in_bytes"; + static final String NATIVE_INFERENCE = "native_inference"; + static final String NATIVE_INFERENCE_IN_BYTES = "native_inference_in_bytes"; + static final String JVM = "jvm"; + static final String HEAP_MAX = "heap_max"; + static final String HEAP_MAX_IN_BYTES = "heap_max_in_bytes"; + static final String JAVA_INFERENCE_MAX = "java_inference_max"; + static final String JAVA_INFERENCE_MAX_IN_BYTES = "java_inference_max_in_bytes"; + static final String JAVA_INFERENCE = "java_inference"; + static final String JAVA_INFERENCE_IN_BYTES = "java_inference_in_bytes"; + + private MlMemoryAction() { + super(NAME, Response::new); + } + + public static class Request extends AcknowledgedRequest { + + private final String nodeId; + + public Request(String nodeId) { + this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "nodeId"); + } + + public Request(StreamInput in) throws IOException { + super(in); + nodeId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(nodeId); + } + + public String getNodeId() { + return nodeId; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(nodeId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(nodeId, other.nodeId); + } + } + + public static class Response extends BaseNodesResponse implements ToXContentFragment { + + public static class MlMemoryStats extends BaseNodeResponse implements ToXContent, Writeable { + + private final ByteSizeValue memTotal; + private final ByteSizeValue memAdjustedTotal; + private final ByteSizeValue mlMax; + private final ByteSizeValue mlNativeCodeOverhead; + private final ByteSizeValue mlAnomalyDetectors; + private final ByteSizeValue mlDataFrameAnalytics; + private final ByteSizeValue mlNativeInference; + private final ByteSizeValue jvmHeapMax; + private final ByteSizeValue jvmInferenceMax; + private final ByteSizeValue jvmInference; + + public MlMemoryStats( + DiscoveryNode node, + ByteSizeValue memTotal, + ByteSizeValue memAdjustedTotal, + ByteSizeValue mlMax, + ByteSizeValue mlNativeCodeOverhead, + ByteSizeValue mlAnomalyDetectors, + ByteSizeValue mlDataFrameAnalytics, + ByteSizeValue mlNativeInference, + ByteSizeValue jvmHeapMax, + ByteSizeValue jvmInferenceMax, + ByteSizeValue jvmInference + ) { + super(node); + this.memTotal 
= Objects.requireNonNull(memTotal); + this.memAdjustedTotal = Objects.requireNonNull(memAdjustedTotal); + this.mlMax = Objects.requireNonNull(mlMax); + this.mlNativeCodeOverhead = Objects.requireNonNull(mlNativeCodeOverhead); + this.mlAnomalyDetectors = Objects.requireNonNull(mlAnomalyDetectors); + this.mlDataFrameAnalytics = Objects.requireNonNull(mlDataFrameAnalytics); + this.mlNativeInference = Objects.requireNonNull(mlNativeInference); + this.jvmHeapMax = Objects.requireNonNull(jvmHeapMax); + this.jvmInferenceMax = Objects.requireNonNull(jvmInferenceMax); + this.jvmInference = Objects.requireNonNull(jvmInference); + } + + public MlMemoryStats(StreamInput in) throws IOException { + super(in); + memTotal = new ByteSizeValue(in); + memAdjustedTotal = new ByteSizeValue(in); + mlMax = new ByteSizeValue(in); + mlNativeCodeOverhead = new ByteSizeValue(in); + mlAnomalyDetectors = new ByteSizeValue(in); + mlDataFrameAnalytics = new ByteSizeValue(in); + mlNativeInference = new ByteSizeValue(in); + jvmHeapMax = new ByteSizeValue(in); + jvmInferenceMax = new ByteSizeValue(in); + jvmInference = new ByteSizeValue(in); + } + + public ByteSizeValue getMemTotal() { + return memTotal; + } + + public ByteSizeValue getMemAdjustedTotal() { + return memAdjustedTotal; + } + + public ByteSizeValue getMlMax() { + return mlMax; + } + + public ByteSizeValue getMlNativeCodeOverhead() { + return mlNativeCodeOverhead; + } + + public ByteSizeValue getMlAnomalyDetectors() { + return mlAnomalyDetectors; + } + + public ByteSizeValue getMlDataFrameAnalytics() { + return mlDataFrameAnalytics; + } + + public ByteSizeValue getMlNativeInference() { + return mlNativeInference; + } + + public ByteSizeValue getJvmHeapMax() { + return jvmHeapMax; + } + + public ByteSizeValue getJvmInferenceMax() { + return jvmInferenceMax; + } + + public ByteSizeValue getJvmInference() { + return jvmInference; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + DiscoveryNode node = getNode(); + builder.startObject(node.getId()); + builder.field("name", node.getName()); + builder.field("ephemeral_id", node.getEphemeralId()); + builder.field("transport_address", node.getAddress().toString()); + + builder.startObject("attributes"); + for (Map.Entry entry : node.getAttributes().entrySet()) { + if (entry.getKey().startsWith("ml.")) { + builder.field(entry.getKey(), entry.getValue()); + } + } + builder.endObject(); + + builder.startArray("roles"); + for (DiscoveryNodeRole role : node.getRoles()) { + builder.value(role.roleName()); + } + builder.endArray(); + + builder.startObject(MEM); + + builder.humanReadableField(TOTAL_IN_BYTES, TOTAL, memTotal); + builder.humanReadableField(ADJUSTED_TOTAL_IN_BYTES, ADJUSTED_TOTAL, memAdjustedTotal); + + builder.startObject(ML); + builder.humanReadableField(MAX_IN_BYTES, MAX, mlMax); + builder.humanReadableField(NATIVE_CODE_OVERHEAD_IN_BYTES, NATIVE_CODE_OVERHEAD, mlNativeCodeOverhead); + builder.humanReadableField(ANOMALY_DETECTORS_IN_BYTES, ANOMALY_DETECTORS, mlAnomalyDetectors); + builder.humanReadableField(DATA_FRAME_ANALYTICS_IN_BYTES, DATA_FRAME_ANALYTICS, mlDataFrameAnalytics); + builder.humanReadableField(NATIVE_INFERENCE_IN_BYTES, NATIVE_INFERENCE, mlNativeInference); + builder.endObject(); + + builder.endObject(); // end mem + + builder.startObject(JVM); + builder.humanReadableField(HEAP_MAX_IN_BYTES, HEAP_MAX, jvmHeapMax); + builder.humanReadableField(JAVA_INFERENCE_MAX_IN_BYTES, JAVA_INFERENCE_MAX, jvmInferenceMax); + 
builder.humanReadableField(JAVA_INFERENCE_IN_BYTES, JAVA_INFERENCE, jvmInference); + builder.endObject(); + + builder.endObject(); // end node + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + memTotal.writeTo(out); + memAdjustedTotal.writeTo(out); + mlMax.writeTo(out); + mlNativeCodeOverhead.writeTo(out); + mlAnomalyDetectors.writeTo(out); + mlDataFrameAnalytics.writeTo(out); + mlNativeInference.writeTo(out); + jvmHeapMax.writeTo(out); + jvmInferenceMax.writeTo(out); + jvmInference.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash( + getNode(), + memTotal, + memAdjustedTotal, + mlMax, + mlNativeCodeOverhead, + mlAnomalyDetectors, + mlDataFrameAnalytics, + mlNativeInference, + jvmHeapMax, + jvmInferenceMax, + jvmInference + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MlMemoryStats that = (MlMemoryStats) o; + return Objects.equals(getNode(), that.getNode()) + && Objects.equals(memTotal, that.memTotal) + && Objects.equals(memAdjustedTotal, that.memAdjustedTotal) + && Objects.equals(mlMax, that.mlMax) + && Objects.equals(mlNativeCodeOverhead, that.mlNativeCodeOverhead) + && Objects.equals(mlAnomalyDetectors, that.mlAnomalyDetectors) + && Objects.equals(mlDataFrameAnalytics, that.mlDataFrameAnalytics) + && Objects.equals(mlNativeInference, that.mlNativeInference) + && Objects.equals(jvmHeapMax, that.jvmHeapMax) + && Objects.equals(jvmInferenceMax, that.jvmInferenceMax) + && Objects.equals(jvmInference, that.jvmInference); + } + + @Override + public String toString() { + return Strings.toString(this); + } + } + + public Response(StreamInput in) throws IOException { + super(in); + } + + public Response(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(MlMemoryStats::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("nodes"); + for (MlMemoryStats mlMemoryStats : getNodes()) { + mlMemoryStats.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(getNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(getNodes(), other.getNodes()); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java new file mode 100644 index 0000000000000..2632fbdad21f5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
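
// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): based solely on the builder calls in
// MlMemoryStats.toXContent above, each node entry in the MlMemoryAction response
// renders roughly as follows. All values are invented, and the human-readable
// variants (e.g. "total" next to "total_in_bytes") only appear when the builder is
// in human-readable mode.
String exampleMlMemoryNode = """
    {
      "<node-id>": {
        "name": "node-1",
        "ephemeral_id": "<id>",
        "transport_address": "127.0.0.1:9300",
        "attributes": { "ml.machine_memory": "68719476736" },
        "roles": [ "ml" ],
        "mem": {
          "total_in_bytes": 68719476736,
          "adjusted_total_in_bytes": 68719476736,
          "ml": {
            "max_in_bytes": 41231686041,
            "native_code_overhead_in_bytes": 0,
            "anomaly_detectors_in_bytes": 0,
            "data_frame_analytics_in_bytes": 0,
            "native_inference_in_bytes": 0
          }
        },
        "jvm": {
          "heap_max_in_bytes": 1073741824,
          "java_inference_max_in_bytes": 0,
          "java_inference_in_bytes": 0
        }
      }
    }
    """;
// ---------------------------------------------------------------------------------
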
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +public class TrainedModelCacheInfoAction extends ActionType { + + public static final TrainedModelCacheInfoAction INSTANCE = new TrainedModelCacheInfoAction(); + public static final String NAME = "cluster:internal/xpack/ml/trained_models/cache/info"; + + private TrainedModelCacheInfoAction() { + super(NAME, Response::new); + } + + public static class Request extends BaseNodesRequest { + + public Request(DiscoveryNode... concreteNodes) { + super(concreteNodes); + } + + public Request(StreamInput in) throws IOException { + super(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Arrays.hashCode(concreteNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Arrays.deepEquals(concreteNodes(), other.concreteNodes()); + } + } + + public static class Response extends BaseNodesResponse { + + public static class CacheInfo extends BaseNodeResponse implements Writeable { + + private final ByteSizeValue jvmInferenceMax; + private final ByteSizeValue jvmInference; + + public CacheInfo(DiscoveryNode node, ByteSizeValue jvmInferenceMax, ByteSizeValue jvmInference) { + super(node); + this.jvmInferenceMax = Objects.requireNonNull(jvmInferenceMax); + this.jvmInference = Objects.requireNonNull(jvmInference); + } + + public CacheInfo(StreamInput in) throws IOException { + super(in); + jvmInferenceMax = new ByteSizeValue(in); + jvmInference = new ByteSizeValue(in); + } + + public ByteSizeValue getJvmInferenceMax() { + return jvmInferenceMax; + } + + public ByteSizeValue getJvmInference() { + return jvmInference; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + jvmInferenceMax.writeTo(out); + jvmInference.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash(getNode(), jvmInferenceMax, jvmInference); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CacheInfo cacheInfo = (CacheInfo) o; + return Objects.equals(getNode(), cacheInfo.getNode()) + && Objects.equals(jvmInferenceMax, cacheInfo.jvmInferenceMax) + && Objects.equals(jvmInference, cacheInfo.jvmInference); + } + } + + public Response(StreamInput in) throws IOException { + super(in); 
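
// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): the new wire classes in this PR
// (MlMemoryAction.Response.MlMemoryStats and TrainedModelCacheInfoAction.Response.CacheInfo,
// both above) follow the standard Writeable contract, so the usual serialization
// round-trip check applies. `original` is an illustrative pre-built CacheInfo
// instance; BytesStreamOutput is the standard in-memory stream used in
// Elasticsearch tests.
//
// import org.elasticsearch.common.io.stream.BytesStreamOutput;
// import org.elasticsearch.common.io.stream.StreamInput;
try (BytesStreamOutput out = new BytesStreamOutput()) {
    original.writeTo(out);                          // writes the DiscoveryNode, then both ByteSizeValues
    try (StreamInput in = out.bytes().streamInput()) {
        CacheInfo roundTripped = new CacheInfo(in);
        assert roundTripped.equals(original);       // the equals/hashCode defined above make this meaningful
    }
}
// ---------------------------------------------------------------------------------
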
+ } + + public Response(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(CacheInfo::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeList(nodes); + } + + @Override + public int hashCode() { + return Objects.hash(getNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(getNodes(), other.getNodes()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 2541a2c5c5cad..69fc5edf111bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -22,6 +23,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -38,8 +40,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; - /** * A datafeed update contains partial properties to update a {@link DatafeedConfig}. 
* The main difference between this class and {@link DatafeedConfig} is that here all @@ -334,7 +334,7 @@ public IndicesOptions getIndicesOptions() { * Applies the update to the given {@link DatafeedConfig} * @return a new {@link DatafeedConfig} that contains the update */ - public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers) { + public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers, ClusterState clusterState) { if (id.equals(datafeedConfig.getId()) == false) { throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id"); } @@ -384,7 +384,7 @@ public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map h builder.setRuntimeMappings(runtimeMappings); } if (headers.isEmpty() == false) { - builder.setHeaders(filterSecurityHeaders(headers)); + builder.setHeaders(ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterState)); } return builder.build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java index b295ba4122580..9a26d10702e5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -121,6 +122,7 @@ public String getTopClassesResultsField() { return topClassesResultsField; } + @Override public String getResultsField() { return resultsField; } @@ -246,6 +248,11 @@ boolean isNoop(ClassificationConfig originalConfig) { && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType())); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private Integer numTopClasses; private String topClassesResultsField; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java index 9696afe098885..58fb78d53b8b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java @@ -65,6 +65,11 @@ public int hashCode() { return EmptyConfigUpdate.class.hashCode(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { @Override @@ -72,6 +77,7 @@ public Builder setResultsField(String resultsField) { return this; } + @Override public EmptyConfigUpdate build() { return new EmptyConfigUpdate(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java index 
78274da77ab10..da0beaa6785d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -101,6 +102,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if (originalConfig instanceof FillMaskConfig == false) { @@ -191,6 +197,7 @@ public FillMaskConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tok return this; } + @Override public FillMaskConfigUpdate build() { return new FillMaskConfigUpdate(this.numTopClasses, this.resultsField, this.tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java index b00ee7134bac9..30ecac00a3b80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -15,7 +15,7 @@ import java.util.HashSet; import java.util.Set; -public interface InferenceConfigUpdate extends NamedWriteable { +public interface InferenceConfigUpdate extends VersionedNamedWriteable { Set RESERVED_ML_FIELD_NAMES = new HashSet<>( Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), TrainedModelConfig.MODEL_ID.getPreferredName()) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java index f3dbd03dcbf3b..d52463eb092c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -144,6 +145,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -159,6 
+165,7 @@ public NerConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tokeniza return this; } + @Override public NerConfigUpdate build() { return new NerConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java index a317e3c6c9e89..92bfe1bf9ea79 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -146,6 +147,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -161,6 +167,7 @@ public PassThroughConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate return this; } + @Override public PassThroughConfigUpdate build() { return new PassThroughConfigUpdate(this.resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index 2400bc1c670a8..c655c391317fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -84,6 +85,7 @@ public Integer getNumTopFeatureImportanceValues() { return numTopFeatureImportanceValues; } + @Override public String getResultsField() { return resultsField; } @@ -109,6 +111,11 @@ public String getName() { return NAME.getPreferredName(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -186,6 +193,7 @@ public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceV return this; } + @Override public RegressionConfigUpdate build() { return new RegressionConfigUpdate(resultsField, numTopFeatureImportanceValues); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java index 2266fb5ca3e44..0eaf101c25c8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -68,6 +69,11 @@ public String getWriteableName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(resultsField); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java index 72a0858117e61..a62f9e3a197fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -21,10 +22,10 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.CLASSIFICATION_LABELS; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.CLASSIFICATION_LABELS; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.NUM_TOP_CLASSES; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.RESULTS_FIELD; public class TextClassificationConfigUpdate extends NlpConfigUpdate implements NamedXContentObject { @@ -96,6 +97,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -237,6 +243,7 @@ public TextClassificationConfigUpdate.Builder setTokenizationUpdate(Tokenization return this; } + @Override public TextClassificationConfigUpdate build() { return new TextClassificationConfigUpdate( this.classificationLabels, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index 1ca7d04fb1eee..589b71bd631d0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -95,6 +96,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { @@ -160,6 +166,7 @@ public TextEmbeddingConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdat return this; } + @Override public TextEmbeddingConfigUpdate build() { return new TextEmbeddingConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java index edfc675fe6ec4..3cf9f8c8f8354 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -235,8 +236,14 @@ public Builder setTokenizationUpdate(TokenizationUpdate tokenizationUpdate) { return this; } + @Override public ZeroShotClassificationConfigUpdate build() { return new ZeroShotClassificationConfigUpdate(labels, isMultiLabel, resultsField, tokenizationUpdate); } } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java index bc9453fd37f91..17e91b4b8c984 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java @@ -18,32 +18,32 @@ public class GetProfileRequest extends ActionRequest { private final String uid; - private final Set datKeys; + private final Set dataKeys; - public GetProfileRequest(String uid, Set datKeys) { + public GetProfileRequest(String uid, Set dataKeys) { this.uid = uid; - this.datKeys = datKeys; + this.dataKeys = dataKeys; } public GetProfileRequest(StreamInput in) throws IOException { super(in); this.uid = in.readString(); - this.datKeys = in.readSet(StreamInput::readString); + this.dataKeys = in.readSet(StreamInput::readString); } public String getUid() { return uid; } - public Set getDatKeys() { - return datKeys; + public Set getDataKeys() { + return dataKeys; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(uid); - out.writeStringCollection(datKeys); + out.writeStringCollection(dataKeys); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java index 3e199e05e2d3e..5b04b6f054248 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java @@ -35,7 +35,7 @@ public record ProfileUser( String username, List roles, String realmName, - @Nullable String realmDomain, + @Nullable String domainName, String email, String fullName, String displayName, @@ -56,7 +56,7 @@ public ProfileUser(StreamInput in) throws IOException { } public QualifiedName qualifiedName() { - return new QualifiedName(username, realmDomain); + return new QualifiedName(username, domainName); } @Override @@ -65,8 +65,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("username", username); builder.field("roles", roles); builder.field("realm_name", realmName); - if (realmDomain != null) { - builder.field("realm_domain", realmDomain); + if (domainName != null) { + builder.field("realm_domain", domainName); } if (email != null) { builder.field("email", email); @@ -87,7 +87,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(username); out.writeStringCollection(roles); out.writeString(realmName); - out.writeOptionalString(realmDomain); + out.writeOptionalString(domainName); out.writeOptionalString(email); out.writeOptionalString(fullName); out.writeOptionalString(displayName); @@ -124,15 +124,19 @@ public Profile(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + innerToXContent(builder, params); + versionControl.toXContent(builder, params); + builder.endObject(); + return builder; + } + + public void innerToXContent(XContentBuilder builder, Params params) throws IOException { builder.field("uid", uid); builder.field("enabled", enabled); builder.field("last_synchronized", lastSynchronized); user.toXContent(builder, params); builder.field("access", access); builder.field("data", applicationData); - versionControl.toXContent(builder, params); - builder.endObject(); - return builder; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java new file mode 100644 index 0000000000000..35c6d7f9dd148 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
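
// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): InferenceConfigUpdate now extends
// VersionedNamedWriteable, so every update type earlier in this diff advertises the
// oldest node version able to read it via getMinimalSupportedVersion(). A typical
// guard before such an object goes on the wire looks like the following; the
// variable names and the exception message are illustrative, not code from this PR.
//
// import org.elasticsearch.common.io.stream.StreamOutput;
if (out.getVersion().before(update.getMinimalSupportedVersion())) {
    throw new IllegalArgumentException(
        "inference config update [" + update.getWriteableName() + "] requires all nodes to be at least ["
            + update.getMinimalSupportedVersion() + "] but the destination node is [" + out.getVersion() + "]"
    );
}
out.writeNamedWriteable(update);
// ---------------------------------------------------------------------------------
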
+ */
+
+package org.elasticsearch.xpack.core.security.action.profile;
+
+import org.elasticsearch.action.ActionType;
+
+public class SearchProfilesAction extends ActionType<SearchProfilesResponse> {
+
+    public static final String NAME = "cluster:admin/xpack/security/profile/search";
+    public static final SearchProfilesAction INSTANCE = new SearchProfilesAction();
+
+    public SearchProfilesAction() {
+        super(NAME, SearchProfilesResponse::new);
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java
new file mode 100644
index 0000000000000..20e48247943d6
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.action.profile;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Set;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public class SearchProfilesRequest extends ActionRequest {
+
+    private final Set<String> dataKeys;
+    /**
+     * String used to search name-related fields of a profile document
+     */
+    private final String name;
+    private final int size;
+
+    public SearchProfilesRequest(Set<String> dataKeys, String name, int size) {
+        this.dataKeys = Objects.requireNonNull(dataKeys, "data parameter must not be null");
+        this.name = Objects.requireNonNull(name, "name must not be null");
+        this.size = size;
+    }
+
+    public SearchProfilesRequest(StreamInput in) throws IOException {
+        super(in);
+        this.dataKeys = in.readSet(StreamInput::readString);
+        this.name = in.readOptionalString();
+        this.size = in.readVInt();
+    }
+
+    public Set<String> getDataKeys() {
+        return dataKeys;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public int getSize() {
+        return size;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeStringCollection(dataKeys);
+        out.writeOptionalString(name);
+        out.writeVInt(size);
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        ActionRequestValidationException validationException = null;
+        if (size < 0) {
+            validationException = addValidationError("[size] parameter cannot be negative but was [" + size + "]", validationException);
+        }
+        return validationException;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java
new file mode 100644
index 0000000000000..955e60d2c289f
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.profile; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public class SearchProfilesResponse extends ActionResponse implements ToXContentObject { + + private final ProfileHit[] profileHits; + private final long tookInMillis; + private final TotalHits totalHits; + + public SearchProfilesResponse(ProfileHit[] profileHits, long tookInMillis, TotalHits totalHits) { + this.profileHits = profileHits; + this.tookInMillis = tookInMillis; + this.totalHits = totalHits; + } + + public SearchProfilesResponse(StreamInput in) throws IOException { + super(in); + this.profileHits = in.readArray(ProfileHit::new, ProfileHit[]::new); + this.tookInMillis = in.readVLong(); + this.totalHits = Lucene.readTotalHits(in); + } + + public ProfileHit[] getProfileHits() { + return profileHits; + } + + public long getTookInMillis() { + return tookInMillis; + } + + public TotalHits getTotalHits() { + return totalHits; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray(profileHits); + out.writeVLong(tookInMillis); + Lucene.writeTotalHits(out, totalHits); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("took", tookInMillis); + builder.startObject("total"); + { + builder.field("value", totalHits.value); + builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + } + builder.endObject(); + builder.startArray("users"); + { + for (ProfileHit profileHit : profileHits) { + profileHit.toXContent(builder, params); + } + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + + public record ProfileHit(Profile profile, float score) implements Writeable, ToXContentObject { + + public ProfileHit(StreamInput in) throws IOException { + this(new Profile(in), in.readFloat()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + profile.writeTo(out); + out.writeFloat(score); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("_score", score); + builder.field("uid", profile.uid()); + profile.user().toXContent(builder, params); + builder.field("access", profile.access()); + builder.field("data", profile.applicationData()); + // TODO: output a field of sort which is just score plus uid? 
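+                // Illustrative note (added commentary, not part of the original change): with the fields
+                // above, a rendered hit looks roughly like
+                //   { "_score": 1.5, "uid": "u_abc...", "user": { ... }, "access": { ... }, "data": { ... } }
+                // and the enclosing response nests such hits under "users", next to "took" and "total".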
+ } + builder.endObject(); + return builder; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java index 10072f4c51a36..d007239755539 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java @@ -81,7 +81,7 @@ public RefreshPolicy getRefreshPolicy() { return refreshPolicy; } - public Set applicationNames() { + public Set getApplicationNames() { final Set names = new HashSet<>(access.keySet()); names.addAll(data.keySet()); return Set.copyOf(names); @@ -90,7 +90,7 @@ public Set applicationNames() { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - final Set applicationNames = applicationNames(); + final Set applicationNames = getApplicationNames(); if (applicationNames.isEmpty()) { validationException = addValidationError("update request is empty", validationException); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index edc01bfe27211..c21233436bd4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -18,6 +18,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -39,6 +41,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newAnonymousRealmRef; import static org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newApiKeyRealmRef; import static org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newInternalAttachRealmRef; @@ -50,6 +54,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.ATTACH_REALM_TYPE; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_NAME; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_TYPE; +import static org.elasticsearch.xpack.core.security.authc.RealmDomain.REALM_DOMAIN_PARSER; // TODO(hub-cap) Clean this up after moving User over - This class can re-inherit its field AUTHENTICATION_KEY in AuthenticationField. 
// That interface can be removed @@ -442,7 +447,7 @@ public String toString() { return builder.toString(); } - public static class RealmRef implements Writeable { + public static class RealmRef implements Writeable, ToXContentObject { private final String nodeName; private final String name; @@ -481,6 +486,21 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("name", name); + builder.field("type", type); + builder.field("node_name", nodeName); + if (domain != null) { + builder.field("domain", domain); + } + } + builder.endObject(); + return builder; + } + public String getNodeName() { return nodeName; } @@ -560,6 +580,19 @@ static RealmRef newApiKeyRealmRef(String nodeName) { } } + public static ConstructingObjectParser REALM_REF_PARSER = new ConstructingObjectParser<>( + "realm_ref", + false, + (args, v) -> new RealmRef((String) args[0], (String) args[1], (String) args[2], (RealmDomain) args[3]) + ); + + static { + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("name")); + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("type")); + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("node_name")); + REALM_REF_PARSER.declareObject(optionalConstructorArg(), (p, c) -> REALM_DOMAIN_PARSER.parse(p, c), new ParseField("domain")); + } + // TODO is a newer version than the node's a valid value? public static Authentication newInternalAuthentication(User internalUser, Version version, String nodeName) { // TODO create a system user class, so that the type system guarantees that this is only invoked for internal users diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java index 017c87ae983eb..e827fea69b527 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java @@ -13,12 +13,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class RealmConfig { final RealmIdentifier identifier; @@ -183,7 +189,7 @@ public boolean hasSetting(Setting.AffixSetting setting) { * (e.g. {@code xpack.security.authc.realms.native.native_realm.order}), it is often necessary to be able to * pass this pair of variables as a single type (e.g. in method parameters, or return values). 
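     * (Illustrative note: with the changes below, a realm identifier also renders as
     * {@code {"name": ..., "type": ...}} and sorts by name and then by type, which gives a
     * {@code RealmDomain} a stable realm order for display.)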
 */
-    public static class RealmIdentifier implements Writeable {
+    public static class RealmIdentifier implements Writeable, ToXContentObject, Comparable<RealmIdentifier> {
 
         private final String type;
         private final String name;
@@ -235,5 +241,33 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeString(type);
             out.writeString(name);
         }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            {
+                builder.field("name", name);
+                builder.field("type", type);
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public int compareTo(RealmIdentifier other) {
+            int result = name.compareTo(other.name);
+            return (result == 0) ? type.compareTo(other.type) : result;
+        }
+    }
+
+    public static ConstructingObjectParser<RealmIdentifier, Void> REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>(
+        "realm_identifier",
+        false,
+        (args, v) -> new RealmIdentifier((String) args[0], (String) args[1])
+    );
+
+    static {
+        REALM_IDENTIFIER_PARSER.declareString(constructorArg(), new ParseField("name"));
+        REALM_IDENTIFIER_PARSER.declareString(constructorArg(), new ParseField("type"));
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java
index 14d044de9c343..8863953dc844d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java
@@ -10,11 +10,19 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Set;
 
-public record RealmDomain(String name, Set<RealmConfig.RealmIdentifier> realms) implements Writeable {
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.xpack.core.security.authc.RealmConfig.REALM_IDENTIFIER_PARSER;
+
+public record RealmDomain(String name, Set<RealmConfig.RealmIdentifier> realms) implements Writeable, ToXContentObject {
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
@@ -27,4 +35,33 @@ static RealmDomain readFrom(StreamInput in) throws IOException {
         Set<RealmConfig.RealmIdentifier> realms = in.readSet(RealmConfig.RealmIdentifier::new);
         return new RealmDomain(domainName, realms);
     }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        {
+            builder.field("name", name);
+            // Sort to have stable order in display
+            builder.xContentList("realms", realms.stream().sorted().toList());
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return "RealmDomain{" + "name='" + name + '\'' + ", realms=" + realms + '}';
+    }
+
+    @SuppressWarnings("unchecked")
+    public static ConstructingObjectParser<RealmDomain, Void> REALM_DOMAIN_PARSER = new ConstructingObjectParser<>(
+        "realm_domain",
+        false,
+        (args, v) -> new RealmDomain((String) args[0], Set.copyOf((List<RealmConfig.RealmIdentifier>) args[1]))
+    );
+
+    static {
+        REALM_DOMAIN_PARSER.declareString(constructorArg(), new ParseField("name"));
+        REALM_DOMAIN_PARSER.declareObjectArray(constructorArg(), (p, c) ->
REALM_IDENTIFIER_PARSER.parse(p, c), new ParseField("realms")); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index efbf987637aff..63d7027cbe78f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -37,6 +37,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -110,9 +111,7 @@ public RoleDescriptor( ) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; - this.configurableClusterPrivileges = configurableClusterPrivileges != null - ? configurableClusterPrivileges - : ConfigurableClusterPrivileges.EMPTY_ARRAY; + this.configurableClusterPrivileges = sortConfigurableClusterPrivileges(configurableClusterPrivileges); this.indicesPrivileges = indicesPrivileges != null ? indicesPrivileges : IndicesPrivileges.NONE; this.applicationPrivileges = applicationPrivileges != null ? applicationPrivileges : ApplicationResourcePrivileges.NONE; this.runAs = runAs != null ? runAs : Strings.EMPTY_ARRAY; @@ -669,6 +668,23 @@ private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XCon .build(); } + private static ConfigurableClusterPrivilege[] sortConfigurableClusterPrivileges( + ConfigurableClusterPrivilege[] configurableClusterPrivileges + ) { + if (null == configurableClusterPrivileges) { + return ConfigurableClusterPrivileges.EMPTY_ARRAY; + } else if (configurableClusterPrivileges.length < 2) { + return configurableClusterPrivileges; + } else { + ConfigurableClusterPrivilege[] configurableClusterPrivilegesCopy = Arrays.copyOf( + configurableClusterPrivileges, + configurableClusterPrivileges.length + ); + Arrays.sort(configurableClusterPrivilegesCopy, Comparator.comparingInt(o -> o.getCategory().ordinal())); + return configurableClusterPrivilegesCopy; + } + } + private static void checkIfExceptFieldsIsSubsetOfGrantedFields(String roleName, String[] grantedFields, String[] deniedFields) { try { FieldPermissions.buildPermittedFieldsAutomaton(grantedFields, deniedFields); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java index ad5c7c436ac74..f9722ca42f20d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java @@ -40,7 +40,8 @@ public interface ConfigurableClusterPrivilege extends NamedWriteable, ToXContent * from the categories. 
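     * (Illustrative note: {@code ConfigurableClusterPrivileges.parse} dispatches on this category field name,
     * so a role document nests each configurable privilege under its category key, i.e. {@code "application"}
     * or, with this change, {@code "profile"}.)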
*/ enum Category { - APPLICATION(new ParseField("application")); + APPLICATION(new ParseField("application")), + PROFILE(new ParseField("profile")); public final ParseField field; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java index 82f7a6062013a..8b46d97d8c843 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java @@ -18,6 +18,8 @@ import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.privilege.ApplicationPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege.Category; import org.elasticsearch.xpack.core.security.support.StringMatcher; @@ -30,6 +32,7 @@ import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Predicate; @@ -94,13 +97,25 @@ public static List parse(XContentParser parser) th while (parser.nextToken() != XContentParser.Token.END_OBJECT) { expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); - expectFieldName(parser, Category.APPLICATION.field); - expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); - expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Category.APPLICATION.field, Category.PROFILE.field); + if (Category.APPLICATION.field.match(parser.currentName(), parser.getDeprecationHandler())) { + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); - expectFieldName(parser, ManageApplicationPrivileges.Fields.MANAGE); - privileges.add(ManageApplicationPrivileges.parse(parser)); - expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT); + expectFieldName(parser, ManageApplicationPrivileges.Fields.MANAGE); + privileges.add(ManageApplicationPrivileges.parse(parser)); + } + } else { + assert Category.PROFILE.field.match(parser.currentName(), parser.getDeprecationHandler()); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); + + expectFieldName(parser, WriteProfileDataPrivileges.Fields.WRITE); + privileges.add(WriteProfileDataPrivileges.parse(parser)); + } + } } return privileges; @@ -131,6 +146,114 @@ private static void expectFieldName(XContentParser parser, ParseField... fields) } } + /** + * The {@link WriteProfileDataPrivileges} privilege is a {@link ConfigurableClusterPrivilege} that grants the + * ability to write the {@code data} and {@code access} sections of any user profile. 
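+     * (Illustrative sketch, not verbatim from this change: in a role descriptor this privilege is expected to
+     * nest under the {@code global} section, e.g. {@code "global": {"profile": {"write": {"applications": ["my-app-*"]}}}}.)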
+     * The privilege is namespace configurable such that only specific top-level keys in the {@code data} and {@code access}
+     * dictionary permit writes (wildcards and regexps are supported, but exclusions are not).
+     */
+    public static class WriteProfileDataPrivileges implements ConfigurableClusterPrivilege {
+        public static final String WRITEABLE_NAME = "write-profile-data-privileges";
+
+        private final Set<String> applicationNames;
+        private final Predicate<String> applicationPredicate;
+        private final Predicate<TransportRequest> requestPredicate;
+
+        public WriteProfileDataPrivileges(Set<String> applicationNames) {
+            this.applicationNames = Collections.unmodifiableSet(applicationNames);
+            this.applicationPredicate = StringMatcher.of(applicationNames);
+            this.requestPredicate = request -> {
+                if (request instanceof final UpdateProfileDataRequest updateProfileRequest) {
+                    assert null == updateProfileRequest.validate();
+                    final Collection<String> requestApplicationNames = updateProfileRequest.getApplicationNames();
+                    return requestApplicationNames.stream().allMatch(application -> applicationPredicate.test(application));
+                }
+                return false;
+            };
+        }
+
+        @Override
+        public Category getCategory() {
+            return Category.PROFILE;
+        }
+
+        public Collection<String> getApplicationNames() {
+            return this.applicationNames;
+        }
+
+        @Override
+        public String getWriteableName() {
+            return WRITEABLE_NAME;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeCollection(this.applicationNames, StreamOutput::writeString);
+        }
+
+        public static WriteProfileDataPrivileges createFrom(StreamInput in) throws IOException {
+            final Set<String> applications = in.readSet(StreamInput::readString);
+            return new WriteProfileDataPrivileges(applications);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            return builder.field(Fields.WRITE.getPreferredName(), Map.of(Fields.APPLICATIONS.getPreferredName(), applicationNames));
+        }
+
+        public static WriteProfileDataPrivileges parse(XContentParser parser) throws IOException {
+            expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME);
+            expectFieldName(parser, Fields.WRITE);
+            expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT);
+            expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME);
+            expectFieldName(parser, Fields.APPLICATIONS);
+            expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY);
+            final String[] applications = XContentUtils.readStringArray(parser, false);
+            expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT);
+            return new WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList(applications)));
+        }
+
+        @Override
+        public String toString() {
+            return "{"
+                + getCategory()
+                + ":"
+                + Fields.WRITE.getPreferredName()
+                + ":"
+                + Fields.APPLICATIONS.getPreferredName()
+                + "="
+                + Strings.collectionToDelimitedString(applicationNames, ",")
+                + "}";
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
+            final WriteProfileDataPrivileges that = (WriteProfileDataPrivileges) o;
+            return this.applicationNames.equals(that.applicationNames);
+        }
+
+        @Override
+        public int hashCode() {
+            return applicationNames.hashCode();
+        }
+
+        @Override
+        public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) {
+            return builder.add(this, Set.of(UpdateProfileDataAction.NAME), requestPredicate);
+        }
+
+        private
interface Fields { + ParseField WRITE = new ParseField("write"); + ParseField APPLICATIONS = new ParseField("applications"); + } + } + /** * The {@code ManageApplicationPrivileges} privilege is a {@link ConfigurableClusterPrivilege} that grants the * ability to execute actions related to the management of application privileges (Get, Put, Delete) for a subset @@ -164,7 +287,7 @@ public Category getCategory() { } public Collection getApplicationNames() { - return Collections.unmodifiableCollection(this.applicationNames); + return this.applicationNames; } @Override @@ -184,10 +307,7 @@ public static ManageApplicationPrivileges createFrom(StreamInput in) throws IOEx @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field( - Fields.MANAGE.getPreferredName(), - Collections.singletonMap(Fields.APPLICATIONS.getPreferredName(), applicationNames) - ); + return builder.field(Fields.MANAGE.getPreferredName(), Map.of(Fields.APPLICATIONS.getPreferredName(), applicationNames)); } public static ManageApplicationPrivileges parse(XContentParser parser) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 0f79f7b9310ae..24589c3525f00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Arrays; import java.util.Collection; @@ -99,7 +100,8 @@ public final class IndexPrivilege extends Privilege { GetDataStreamAction.NAME, ResolveIndexAction.NAME, FieldCapabilitiesAction.NAME + "*", - GetRollupIndexCapsAction.NAME + "*" + GetRollupIndexCapsAction.NAME + "*", + GetCheckpointAction.NAME + "*" // transform internal action ); private static final Automaton MANAGE_FOLLOW_INDEX_AUTOMATON = patterns( PutFollowAction.NAME, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 3e7fe85c132bd..e336c1ee32460 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -22,10 +22,10 @@ import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesAction; import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; -import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; +import 
org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.WriteProfileDataPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.KibanaSystemUser; import org.elasticsearch.xpack.core.security.user.UsernamesField; @@ -667,8 +667,6 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { "delegate_pki", GetProfileAction.NAME, ActivateProfileAction.NAME, - // TODO: this cluster action will be replaced with a special privilege that grants write access to a subset of namespaces - UpdateProfileDataAction.NAME, // To facilitate ML UI functionality being controlled using Kibana security privileges "manage_ml", // The symbolic constant for this one is in SecurityActionMapper, so not accessible from X-Pack core @@ -780,7 +778,9 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { .privileges("create_index", "delete_index", "read", "index") .build(), }, null, - new ConfigurableClusterPrivilege[] { new ManageApplicationPrivileges(Collections.singleton("kibana-*")) }, + new ConfigurableClusterPrivilege[] { + new ManageApplicationPrivileges(Set.of("kibana-*")), + new WriteProfileDataPrivileges(Set.of("kibana-*")) }, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java new file mode 100644 index 0000000000000..168853fa9bf70 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; + +/** + * Transform internal API (no REST layer) to retrieve index checkpoints. 
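+ * A hypothetical caller sketch (illustrative only; the index name and listener are made up):
+ * {@code client.execute(GetCheckpointAction.INSTANCE, new Request(new String[] { "my-index" }, IndicesOptions.lenientExpandOpen()), listener)}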
+ */
+public class GetCheckpointAction extends ActionType<GetCheckpointAction.Response> {
+
+    public static final GetCheckpointAction INSTANCE = new GetCheckpointAction();
+
+    // note: this is an index action and requires `view_index_metadata`
+    public static final String NAME = "indices:internal/transform/checkpoint";
+
+    private GetCheckpointAction() {
+        super(NAME, GetCheckpointAction.Response::new);
+    }
+
+    public static class Request extends ActionRequest implements IndicesRequest.Replaceable {
+
+        private String[] indices;
+        private final IndicesOptions indicesOptions;
+
+        public Request(StreamInput in) throws IOException {
+            super(in);
+            indices = in.readStringArray();
+            indicesOptions = IndicesOptions.readIndicesOptions(in);
+        }
+
+        public Request(String[] indices, IndicesOptions indicesOptions) {
+            this.indices = indices != null ? indices : Strings.EMPTY_ARRAY;
+            this.indicesOptions = indicesOptions;
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public String[] indices() {
+            return indices;
+        }
+
+        @Override
+        public IndicesOptions indicesOptions() {
+            return indicesOptions;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == this) {
+                return true;
+            }
+            if (obj == null || obj.getClass() != getClass()) {
+                return false;
+            }
+            Request that = (Request) obj;
+
+            return Arrays.equals(indices, that.indices) && Objects.equals(indicesOptions, that.indicesOptions);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(Arrays.hashCode(indices), indicesOptions);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeStringArray(indices);
+            indicesOptions.writeIndicesOptions(out);
+        }
+
+        @Override
+        public IndicesRequest indices(String... indices) {
+            this.indices = indices;
+            return this;
+        }
+
+        // this action does not allow remote indices, but they have to be resolved upfront, see {@link DefaultCheckpointProvider}
+        @Override
+        public boolean allowsRemoteIndices() {
+            return false;
+        }
+    }
+
+    public static class Response extends ActionResponse {
+
+        private final Map<String, long[]> checkpoints;
+
+        public Response(Map<String, long[]> checkpoints) {
+            this.checkpoints = checkpoints;
+        }
+
+        public Response(StreamInput in) throws IOException {
+            this.checkpoints = in.readOrderedMap(StreamInput::readString, StreamInput::readLongArray);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeMap(getCheckpoints(), StreamOutput::writeString, StreamOutput::writeLongArray);
+        }
+
+        public Map<String, long[]> getCheckpoints() {
+            return Collections.unmodifiableMap(checkpoints);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == this) {
+                return true;
+            }
+            if (obj == null || obj.getClass() != getClass()) {
+                return false;
+            }
+            Response that = (Response) obj;
+
+            return this.checkpoints.size() == that.checkpoints.size()
+                && this.checkpoints.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), that.checkpoints.get(e.getKey())));
+        }
+
+        @Override
+        public int hashCode() {
+            int hash = 1;
+
+            for (Entry<String, long[]> e : checkpoints.entrySet()) {
+                hash = 31 * hash + Objects.hash(e.getKey(), Arrays.hashCode(e.getValue()));
+            }
+
+            return hash;
+        }
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java
new file mode 100644
index 0000000000000..341cc0a9cec0b
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.transform.action;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.ActionType;
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.index.shard.ShardId;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.Set;
+
+public class GetCheckpointNodeAction extends ActionType<GetCheckpointNodeAction.Response> {
+
+    public static final GetCheckpointNodeAction INSTANCE = new GetCheckpointNodeAction();
+
+    // note: this is an index action and requires `view_index_metadata`
+    public static final String NAME = GetCheckpointAction.NAME + "[n]";
+
+    private GetCheckpointNodeAction() {
+        super(NAME, GetCheckpointNodeAction.Response::new);
+    }
+
+    public static class Response extends ActionResponse {
+        private final Map<String, long[]> checkpoints;
+
+        public Response(Map<String, long[]> checkpoints) {
+            this.checkpoints = checkpoints;
+        }
+
+        public Response(StreamInput in) throws IOException {
+            this.checkpoints = in.readOrderedMap(StreamInput::readString, StreamInput::readLongArray);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeMap(getCheckpoints(), StreamOutput::writeString, StreamOutput::writeLongArray);
+        }
+
+        public Map<String, long[]> getCheckpoints() {
+            return checkpoints;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == this) {
+                return true;
+            }
+            if (obj == null || obj.getClass() != getClass()) {
+                return false;
+            }
+            Response that = (Response) obj;
+
+            return this.checkpoints.size() == that.checkpoints.size()
+                && this.checkpoints.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), that.checkpoints.get(e.getKey())));
+        }
+
+        @Override
+        public int hashCode() {
+            int hash = 1;
+
+            for (Entry<String, long[]> e : checkpoints.entrySet()) {
+                hash = 31 * hash + Objects.hash(e.getKey(), Arrays.hashCode(e.getValue()));
+            }
+
+            return hash;
+        }
+    }
+
+    public static class Request extends ActionRequest implements IndicesRequest {
+
+        private final Set<ShardId> shards;
+        private final OriginalIndices originalIndices;
+
+        public Request(Set<ShardId> shards, OriginalIndices originalIndices) {
+            this.shards = shards;
+            this.originalIndices = originalIndices;
+        }
+
+        public Request(StreamInput in) throws IOException {
+            super(in);
+            this.shards = Collections.unmodifiableSet(in.readSet(ShardId::new));
+            this.originalIndices = OriginalIndices.readOriginalIndices(in);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeCollection(shards);
+            OriginalIndices.writeOriginalIndices(originalIndices, out);
+        }
+
+        public Set<ShardId> getShards() {
+            return shards;
+        }
+
+        public OriginalIndices getOriginalIndices() {
+            return originalIndices;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == this) {
+                return true;
+            }
+            if (obj == null || obj.getClass() != getClass()) {
+                return false;
+            }
+            Request that = (Request) obj;
+
+            return Objects.equals(shards, that.shards) && Objects.equals(originalIndices, that.originalIndices);
+        }
+
+        @Override
+ public int hashCode() { + return Objects.hash(shards, originalIndices); + } + + @Override + public String[] indices() { + return originalIndices.indices(); + } + + @Override + public IndicesOptions indicesOptions() { + return originalIndices.indicesOptions(); + } + + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java index 8fc38e22eb969..96fa4de6c0d9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.common.secret.Secret; import org.elasticsearch.xpack.core.watcher.crypto.CryptoService; @@ -26,7 +26,7 @@ * {@link Secret}s are encrypted values that are stored in memory and are decrypted * on demand when needed. */ -public class WatcherXContentParser extends FilterXContentParser { +public class WatcherXContentParser extends FilterXContentParserWrapper { public static final String REDACTED_PASSWORD = "::es_redacted::"; diff --git a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json index e155f74ae0486..262e07d37c5ea 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json @@ -492,6 +492,10 @@ "uuid": { "type": "alias", "path": "service.id" + }, + "version": { + "type": "alias", + "path": "service.version" } } }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index c272eaeb9172b..bea688e9784de 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; @@ -16,16 +17,24 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; 
 import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField;
+import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer;
+import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication;
+import org.elasticsearch.xpack.core.security.user.User;
+
+import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -37,6 +46,7 @@
 import static org.hamcrest.Matchers.anEmptyMap;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
@@ -372,4 +382,87 @@ public void testFilterSecurityHeaders() {
             expectThrows(NullPointerException.class, () -> ClientHelper.filterSecurityHeaders(null));
         }
     }
+
+    public void testGetPersistableSafeSecurityHeaders() throws IOException {
+        final ClusterState clusterState = mock(ClusterState.class);
+        final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class);
+        when(clusterState.nodes()).thenReturn(discoveryNodes);
+        when(discoveryNodes.getMinNodeVersion()).thenReturn(VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT));
+        // No security header
+        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+        final String nonSecurityHeaderKey = "not-a-security-header";
+        if (randomBoolean()) {
+            threadContext.putHeader(nonSecurityHeaderKey, randomAlphaOfLength(8));
+        }
+        assertThat(ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState), anEmptyMap());
+
+        final boolean hasRunAsHeader = randomBoolean();
+        if (hasRunAsHeader) {
+            threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as_header");
+        }
+
+        final Authentication authentication = Authentication.newRealmAuthentication(
+            new User(randomAlphaOfLength(8)),
+            new Authentication.RealmRef("name", "type", "node")
+        );
+
+        final boolean hasAuthHeader = randomBoolean();
+        // There may be a secondary header
+        final boolean hasSecondaryAuthHeader = randomFrom(hasAuthHeader == false, true);
+        if (hasAuthHeader) {
+            new AuthenticationContextSerializer().writeToContext(authentication, threadContext);
+        }
+        if (hasSecondaryAuthHeader) {
+            new AuthenticationContextSerializer(SecondaryAuthentication.THREAD_CTX_KEY).writeToContext(authentication, threadContext);
+        }
+
+        // No rewriting for current version
+        when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT);
+        final Map<String, String> headers1;
+        if (randomBoolean()) {
+            headers1 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState);
+        } else {
+            headers1 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext.getHeaders(), clusterState);
+        }
+        assertThat(headers1, not(hasKey(nonSecurityHeaderKey)));
+        if (hasAuthHeader) {
+            assertThat(headers1, hasKey(AuthenticationField.AUTHENTICATION_KEY));
+            assertThat(
+                headers1.get(AuthenticationField.AUTHENTICATION_KEY),
+                equalTo(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY))
+            );
+        }
+        if (hasSecondaryAuthHeader) {
+            assertThat(headers1, hasKey(SecondaryAuthentication.THREAD_CTX_KEY));
+            assertThat(
+                headers1.get(SecondaryAuthentication.THREAD_CTX_KEY),
+                equalTo(threadContext.getHeader(SecondaryAuthentication.THREAD_CTX_KEY))
+            );
+        }
+
+        // Rewritten for older version
+        final Version previousVersion = VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT);
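+        // Explanatory note (added commentary, not in the original test): getPersistableSafeSecurityHeaders
+        // re-encodes the authentication headers at the cluster's minimum node version, so headers persisted
+        // alongside a task remain readable by the oldest node; the assertions below verify the re-encoded
+        // version and the unchanged user.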
when(discoveryNodes.getMinNodeVersion()).thenReturn(previousVersion); + final Map headers2; + if (randomBoolean()) { + headers2 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState); + } else { + headers2 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext.getHeaders(), clusterState); + } + assertThat(headers2, not(hasKey(nonSecurityHeaderKey))); + if (hasAuthHeader) { + final Authentication rewrittenAuth = AuthenticationContextSerializer.decode( + headers2.get(AuthenticationField.AUTHENTICATION_KEY) + ); + assertThat(rewrittenAuth.getVersion(), equalTo(previousVersion)); + assertThat(rewrittenAuth.getUser(), equalTo(authentication.getUser())); + } + if (hasSecondaryAuthHeader) { + final Authentication rewrittenSecondaryAuth = AuthenticationContextSerializer.decode( + headers2.get(SecondaryAuthentication.THREAD_CTX_KEY) + ); + assertThat(rewrittenSecondaryAuth.getVersion(), equalTo(previousVersion)); + assertThat(rewrittenSecondaryAuth.getUser(), equalTo(authentication.getUser())); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index de78339b67492..35dccbb3ef9ed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -20,6 +20,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.ElectionStrategy; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; @@ -104,6 +105,7 @@ import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Predicate; @@ -568,6 +570,15 @@ public Map getInternalRepositories( return internalRepositories; } + @Override + public Consumer addPreRestoreCheck() { + List> checks = filterPlugins(RepositoryPlugin.class).stream() + .map(RepositoryPlugin::addPreRestoreCheck) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + return checks.isEmpty() ? null : imd -> checks.forEach(c -> c.accept(imd)); + } + @Override public void close() throws IOException { IOUtils.close(plugins); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsageTests.java new file mode 100644 index 0000000000000..db644c2261a1e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsageTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.core.archive; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; + +public class ArchiveFeatureSetUsageTests extends AbstractWireSerializingTestCase { + + @Override + protected ArchiveFeatureSetUsage createTestInstance() { + boolean available = randomBoolean(); + return new ArchiveFeatureSetUsage(available, randomIntBetween(0, 100000)); + } + + @Override + protected ArchiveFeatureSetUsage mutateInstance(ArchiveFeatureSetUsage instance) throws IOException { + boolean available = instance.available(); + int numArchiveIndices = instance.getNumberOfArchiveIndices(); + switch (between(0, 1)) { + case 0 -> available = available == false; + case 1 -> numArchiveIndices = randomValueOtherThan(numArchiveIndices, () -> randomIntBetween(0, 100000)); + default -> throw new AssertionError("Illegal randomisation branch"); + } + return new ArchiveFeatureSetUsage(available, numArchiveIndices); + } + + @Override + protected Writeable.Reader instanceReader() { + return ArchiveFeatureSetUsage::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java new file mode 100644 index 0000000000000..eb8c65c884d88 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; + +import java.net.InetAddress; + +public class CacheInfoTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return CacheInfo::new; + } + + @Override + protected CacheInfo createTestInstance() { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), randomIntBetween(1024, 65535)), + Version.CURRENT + ); + return createTestInstance(node); + } + + static CacheInfo createTestInstance(DiscoveryNode node) { + return new CacheInfo(node, ByteSizeValue.ofMb(randomLongBetween(1000, 30000)), ByteSizeValue.ofMb(randomLongBetween(0, 1000))); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java new file mode 100644 index 0000000000000..eb4716bc05178 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +public class MlMemoryActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return MlMemoryAction.Request::new; + } + + @Override + protected MlMemoryAction.Request createTestInstance() { + return new MlMemoryAction.Request(randomAlphaOfLength(20)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java new file mode 100644 index 0000000000000..2875ab7c80208 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.List; + +public class MlMemoryActionResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return MlMemoryAction.Response::new; + } + + @Override + protected MlMemoryAction.Response createTestInstance() { + int numNodes = randomIntBetween(1, 20); + List nodes = new ArrayList<>(numNodes); + for (int i = 0; i < numNodes; ++i) { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i), + Version.CURRENT + ); + nodes.add(MlMemoryStatsTests.createTestInstance(node)); + } + int numFailures = randomIntBetween(0, 5); + List failures = (numFailures > 0) ? new ArrayList<>(numFailures) : List.of(); + for (int i = 0; i < numFailures; ++i) { + failures.add( + new FailedNodeException( + randomAlphaOfLength(20), + randomAlphaOfLength(50), + new ElasticsearchException(randomAlphaOfLength(30)) + ) + ); + } + return new MlMemoryAction.Response(ClusterName.DEFAULT, nodes, failures); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java new file mode 100644 index 0000000000000..d5093e7f4adcc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; + +import java.net.InetAddress; + +public class MlMemoryStatsTests extends AbstractWireSerializingTestCase<MlMemoryStats> { + + @Override + protected Writeable.Reader<MlMemoryStats> instanceReader() { + return MlMemoryStats::new; + } + + @Override + protected MlMemoryStats createTestInstance() { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), randomIntBetween(1024, 65535)), + Version.CURRENT + ); + return createTestInstance(node); + } + + static MlMemoryStats createTestInstance(DiscoveryNode node) { + return new MlMemoryStats( + node, + ByteSizeValue.ofGb(randomLongBetween(1, 64)), + ByteSizeValue.ofGb(randomLongBetween(1, 64)), + ByteSizeValue.ofGb(randomLongBetween(0, 48)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofKb(randomLongBetween(0, 30000)), + ByteSizeValue.ofGb(randomLongBetween(0, 32)), + ByteSizeValue.ofGb(randomLongBetween(0, 16)), + ByteSizeValue.ofMb(randomLongBetween(0, 10000)) + ); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java new file mode 100644 index 0000000000000..e879dc62a4c6a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.net.InetAddress; + +public class TrainedModelCacheInfoRequestTests extends AbstractWireSerializingTestCase<TrainedModelCacheInfoAction.Request> { + + @Override + protected Writeable.Reader<TrainedModelCacheInfoAction.Request> instanceReader() { + return TrainedModelCacheInfoAction.Request::new; + } + + @Override + protected TrainedModelCacheInfoAction.Request createTestInstance() { + int numNodes = randomIntBetween(1, 20); + DiscoveryNode[] nodes = new DiscoveryNode[numNodes]; + for (int i = 0; i < numNodes; ++i) { + nodes[i] = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i), + Version.CURRENT + ); + } + return new TrainedModelCacheInfoAction.Request(nodes); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java new file mode 100644 index 0000000000000..2964a47e9c3f0 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; + +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.List; + +public class TrainedModelCacheInfoResponseTests extends AbstractWireSerializingTestCase<TrainedModelCacheInfoAction.Response> { + + @Override + protected Writeable.Reader<TrainedModelCacheInfoAction.Response> instanceReader() { + return TrainedModelCacheInfoAction.Response::new; + } + + @Override + protected TrainedModelCacheInfoAction.Response createTestInstance() { + int numNodes = randomIntBetween(1, 20); + List<CacheInfo> nodes = new ArrayList<>(numNodes); + for (int i = 0; i < numNodes; ++i) { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i), + Version.CURRENT + ); + nodes.add(CacheInfoTests.createTestInstance(node)); + } + int numFailures = randomIntBetween(0, 5); + List<FailedNodeException> failures = (numFailures > 0) ? 
new ArrayList<>(numFailures) : List.of(); + for (int i = 0; i < numFailures; ++i) { + failures.add( + new FailedNodeException( + randomAlphaOfLength(20), + randomAlphaOfLength(50), + new ElasticsearchException(randomAlphaOfLength(30)) + ) + ); + } + return new TrainedModelCacheInfoAction.Response(ClusterName.DEFAULT, nodes, failures); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 8c820737fda9c..ecb67833eacd6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -47,6 +49,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; +import org.junit.Before; import java.io.IOException; import java.time.ZoneOffset; @@ -63,9 +66,21 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpdate> { + private ClusterState clusterState; + + @Before + public void init() { + clusterState = mock(ClusterState.class); + final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); + when(clusterState.nodes()).thenReturn(discoveryNodes); + when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT); + } + @Override protected DatafeedUpdate createTestInstance() { return createRandomized(DatafeedConfigTests.randomValidDatafeedId()); @@ -210,20 +225,20 @@ public void testMultipleDefinedAggParse() throws IOException { public void testApply_failBecauseTargetDatafeedHasDifferentId() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - expectThrows(IllegalArgumentException.class, () -> createRandomized(datafeed.getId() + "_2").apply(datafeed, null)); + expectThrows(IllegalArgumentException.class, () -> createRandomized(datafeed.getId() + "_2").apply(datafeed, null, clusterState)); } public void testApply_failBecauseJobIdChanged() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); DatafeedUpdate datafeedUpdateWithUnchangedJobId = new DatafeedUpdate.Builder(datafeed.getId()).setJobId("foo").build(); - DatafeedConfig updatedDatafeed = datafeedUpdateWithUnchangedJobId.apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = datafeedUpdateWithUnchangedJobId.apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed, equalTo(datafeed)); DatafeedUpdate datafeedUpdateWithChangedJobId = new DatafeedUpdate.Builder(datafeed.getId()).setJobId("bar").build(); ElasticsearchStatusException ex = expectThrows( ElasticsearchStatusException.class, - () -> 
datafeedUpdateWithChangedJobId.apply(datafeed, Collections.emptyMap()) + () -> datafeedUpdateWithChangedJobId.apply(datafeed, Collections.emptyMap(), clusterState) ); assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(ex.getMessage(), equalTo(DatafeedUpdate.ERROR_MESSAGE_ON_JOB_ID_UPDATE)); @@ -231,7 +246,8 @@ public void testApply_failBecauseJobIdChanged() { public void testApply_givenEmptyUpdate() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build() + .apply(datafeed, Collections.emptyMap(), clusterState); assertThat(datafeed, equalTo(updatedDatafeed)); } @@ -242,7 +258,7 @@ public void testApply_givenPartialUpdate() { DatafeedUpdate.Builder updated = new DatafeedUpdate.Builder(datafeed.getId()); updated.setScrollSize(datafeed.getScrollSize() + 1); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); DatafeedConfig.Builder expectedDatafeed = new DatafeedConfig.Builder(datafeed); expectedDatafeed.setScrollSize(datafeed.getScrollSize() + 1); @@ -270,7 +286,7 @@ public void testApply_givenFullUpdateNoAggregations() { field.put("updated_runtime_field_foo", settings); update.setRuntimeMappings(field); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed.getJobId(), equalTo("foo-feed")); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2"))); @@ -303,7 +319,7 @@ public void testApply_givenAggregations() throws IOException { ); update.setAggregations(aggProvider); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); assertThat(updatedDatafeed.getParsedAggregations(xContentRegistry()), equalTo(aggProvider.getParsedAggs())); @@ -314,7 +330,7 @@ public void testApply_givenIndicesOptions() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).setIndicesOptions( IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN - ).build().apply(datafeed, Collections.emptyMap()); + ).build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(datafeed.getIndicesOptions(), is(not(equalTo(updatedDatafeed.getIndicesOptions())))); assertThat(updatedDatafeed.getIndicesOptions(), equalTo(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN)); } @@ -332,7 +348,7 @@ public void testApply_GivenRandomUpdates_AssertImmutability() { update = createRandomized(datafeed.getId(), datafeed); } - DatafeedConfig updatedDatafeed = update.apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.apply(datafeed, Collections.emptyMap(), clusterState); assertThat("update was " + update, datafeed, not(equalTo(updatedDatafeed))); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java index 41b4be1276783..a99945d2361d6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java @@ -6,11 +6,11 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.client.ml.job.config.DetectorFunction; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index 44d8898c2cd34..8ee11741325e8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.client.ml.job.config.DetectorFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; @@ -16,6 +15,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction; import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.io.IOException; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java index 33e692bd3c723..d40f39fb5efe3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.utils; -import org.elasticsearch.client.ml.inference.NamedXContentObject; -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java index 38a1ab4f45c79..ae043a6e73de4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java @@ -59,23 +59,7 @@ public void testFromMap() throws IOException { assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggMap); assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggTransformer.toMap(aggTransformer.fromMap(aggMap))); - Map<String, Object> queryMap 
= Collections.singletonMap( - "match", - Collections.singletonMap("fieldName", new HashMap<String, Object>() { - { - // Add all the default fields so they are not added dynamically when the object is parsed - put("query", "fieldValue"); - put("operator", "OR"); - put("prefix_length", 0); - put("max_expansions", 50); - put("fuzzy_transpositions", true); - put("lenient", false); - put("zero_terms_query", "NONE"); - put("auto_generate_synonyms_phrase_query", true); - put("boost", 1.0); - } - }) - ); + Map<String, Object> queryMap = Map.of("match", Map.of("fieldName", Map.of("query", "fieldValue"))); XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer( xContentRegistry() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index 00d5f3d9aa69f..9ed8c2e2e99ca 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import java.io.IOException; @@ -183,10 +184,34 @@ private PutRoleRequest buildRandomRequest() { .build(); } request.addApplicationPrivileges(applicationPrivileges); - - if (randomBoolean()) { - final String[] appNames = randomArray(1, 4, String[]::new, stringWithInitialLowercase); - request.conditionalCluster(new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(appNames))); + switch (randomIntBetween(0, 3)) { + case 0: + request.conditionalCluster(new ConfigurableClusterPrivilege[0]); + break; + case 1: + request.conditionalCluster( + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; + case 2: + request.conditionalCluster( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; + case 3: + request.conditionalCluster( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; } request.runAs(generateRandomStringArray(4, 3, false, true)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java index 034a035f12cbf..9b7443080639d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java @@ -62,10 +62,14 @@ public void 
testGenerateAndParseXContent() throws Exception { } private ConfigurableClusterPrivilege[] buildSecurityPrivileges() { - return buildSecurityPrivileges(randomIntBetween(4, 7)); - } - - private ConfigurableClusterPrivilege[] buildSecurityPrivileges(int applicationNameLength) { - return new ConfigurableClusterPrivilege[] { ManageApplicationPrivilegesTests.buildPrivileges(applicationNameLength) }; + return switch (randomIntBetween(0, 3)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { ManageApplicationPrivilegesTests.buildPrivileges() }; + case 2 -> new ConfigurableClusterPrivilege[] { WriteProfileDataPrivilegesTests.buildPrivileges() }; + case 3 -> new ConfigurableClusterPrivilege[] { + ManageApplicationPrivilegesTests.buildPrivileges(), + WriteProfileDataPrivilegesTests.buildPrivileges() }; + default -> throw new IllegalStateException("Unexpected value"); + }; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java index e396460e88f79..421b76d089c40 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java @@ -149,7 +149,7 @@ private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) return new ManageApplicationPrivileges(new LinkedHashSet<>(original.getApplicationNames())); } - private ManageApplicationPrivileges buildPrivileges() { + static ManageApplicationPrivileges buildPrivileges() { return buildPrivileges(randomIntBetween(4, 7)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java new file mode 100644 index 0000000000000..8c045e001f2b2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java @@ -0,0 +1,270 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; + +public class WriteProfileDataPrivilegesTests extends ESTestCase { + + public void testSerialization() throws Exception { + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges original = buildPrivileges(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + original.writeTo(out); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) { + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges copy = + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.createFrom(in); + assertThat(copy, equalTo(original)); + assertThat(original, equalTo(copy)); + } + } + } + + public void testGenerateAndParseXContent() throws Exception { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + final XContentBuilder builder = new XContentBuilder(xContent, out); + + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges original = buildPrivileges(); + builder.startObject(); + original.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + builder.flush(); + + final byte[] bytes = out.toByteArray(); + try (XContentParser parser = 
xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges clone = + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + + assertThat(clone, equalTo(original)); + assertThat(original, equalTo(clone)); + } + } + } + + public void testActionAndRequestPredicate() { + final String prefix = randomAlphaOfLengthBetween(0, 3); + final String name = randomAlphaOfLengthBetween(0, 5); + String other = randomAlphaOfLengthBetween(0, 7); + if (other.startsWith(prefix) || other.equals(name)) { + other = null; + } + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges writeProfileDataPrivileges = + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Sets.newHashSet(prefix + "*", name)); + final ClusterPermission writeProfileDataPermission = writeProfileDataPrivileges.buildPermission(ClusterPermission.builder()) + .build(); + assertThat(writeProfileDataPermission, notNullValue()); + + final Authentication authentication = mock(Authentication.class); + // request application name matches privilege wildcard + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(prefix + randomAlphaOfLengthBetween(0, 2)), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(prefix + randomAlphaOfLengthBetween(0, 2))); + assertTrue( + writeProfileDataPermission.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, authentication) + ); + // request application name matches privilege name + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(name), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(name)); + assertTrue( + writeProfileDataPermission.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, authentication) + ); + // different action name + assertFalse( + writeProfileDataPermission.check( + randomFrom(ActivateProfileAction.NAME, GetProfileAction.NAME, SearchProfilesAction.NAME), + updateProfileDataRequest, + authentication + ) + ); + if (other != null) { + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest( + randomBoolean() ? Set.of(prefix + randomAlphaOfLengthBetween(0, 2), other) : Set.of(other), + Set.of() + ) + : newUpdateProfileDataRequest( + Set.of(), + randomBoolean() ? Set.of(prefix + randomAlphaOfLengthBetween(0, 2), other) : Set.of(other) + ); + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication)); + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(randomBoolean() ? Set.of(name, other) : Set.of(other), Set.of()) + : newUpdateProfileDataRequest(Set.of(), randomBoolean() ? 
Set.of(name, other) : Set.of(other)); + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication)); + } + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, mock(TransportRequest.class), authentication)); + } + + public void testParseAbnormals() throws Exception { + final String nullApplications = "{\"write\":{\"applications\":null}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(nullApplications.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + expectThrows(XContentParseException.class, () -> ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser)); + parser.nextToken(); + } + final String emptyApplications = "{\"write\":{\"applications\":[]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(emptyApplications.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + ConfigurableClusterPrivileges.WriteProfileDataPrivileges priv = ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse( + parser + ); + parser.nextToken(); + assertThat(priv.getApplicationNames().size(), is(0)); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(randomAlphaOfLengthBetween(0, 2)), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(randomAlphaOfLengthBetween(0, 2))); + ClusterPermission perm = priv.buildPermission(ClusterPermission.builder()).build(); + assertFalse(perm.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, mock(Authentication.class))); + } + final String aNullApplication = "{\"write\":{\"applications\":[null]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(aNullApplication.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + expectThrows(ElasticsearchParseException.class, () -> ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser)); + parser.nextToken(); + } + final String anEmptyApplication = "{\"write\":{\"applications\":[\"\"]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(anEmptyApplication.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + ConfigurableClusterPrivileges.WriteProfileDataPrivileges priv = ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse( + parser + ); + parser.nextToken(); + assertThat(priv.getApplicationNames().size(), is(1)); + assertThat(priv.getApplicationNames().stream().findFirst().get(), is("")); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? 
newUpdateProfileDataRequest(Set.of(randomAlphaOfLengthBetween(1, 2)), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(randomAlphaOfLengthBetween(1, 2))); + ClusterPermission perm = priv.buildPermission(ClusterPermission.builder()).build(); + assertFalse(perm.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, mock(Authentication.class))); + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(""), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of("")); + perm = priv.buildPermission(ClusterPermission.builder()).build(); + assertTrue(perm.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, mock(Authentication.class))); + } + } + + public void testEqualsAndHashCode() { + final int applicationNameLength = randomIntBetween(4, 7); + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges privileges = buildPrivileges(applicationNameLength); + final EqualsHashCodeTestUtils.MutateFunction<ConfigurableClusterPrivileges.WriteProfileDataPrivileges> mutate = + orig -> buildPrivileges(applicationNameLength + randomIntBetween(1, 3)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(privileges, this::clone, mutate); + } + + private UpdateProfileDataRequest newUpdateProfileDataRequest(Set<String> accessNames, Set<String> dataNames) { + Map<String, Object> access = new HashMap<>(); + for (String accessName : accessNames) { + access.put(accessName, mock(Object.class)); + } + Map<String, Object> data = new HashMap<>(); + for (String dataName : dataNames) { + data.put(dataName, mock(Object.class)); + } + return new UpdateProfileDataRequest( + randomAlphaOfLengthBetween(4, 8), + access, + data, + randomLong(), + randomLong(), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + } + + private ConfigurableClusterPrivileges.WriteProfileDataPrivileges clone( + ConfigurableClusterPrivileges.WriteProfileDataPrivileges original + ) { + return new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(original.getApplicationNames())); + } + + static ConfigurableClusterPrivileges.WriteProfileDataPrivileges buildPrivileges() { + return buildPrivileges(randomIntBetween(4, 7)); + } + + static ConfigurableClusterPrivileges.WriteProfileDataPrivileges buildPrivileges(int applicationNameLength) { + Set<String> applicationNames = Sets.newHashSet(Arrays.asList(generateRandomStringArray(5, applicationNameLength, false, false))); + return new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(applicationNames); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 4b166e15f8b56..b4aef65885f00 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.action.main.MainAction; import org.elasticsearch.action.search.MultiSearchAction; import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -159,8 +160,12 @@ import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest; import 
org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; @@ -209,6 +214,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.SortedMap; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.RESTRICTED_INDICES_AUTOMATON; @@ -451,7 +457,79 @@ public void testKibanaSystemRole() { // User profile assertThat(kibanaRole.cluster().check(GetProfileAction.NAME, request, authentication), is(true)); assertThat(kibanaRole.cluster().check(ActivateProfileAction.NAME, request, authentication), is(true)); - assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, request, authentication), is(true)); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of(), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(true)); + updateProfileDataRequest = new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(true)); + updateProfileDataRequest = randomBoolean() + ? new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(randomAlphaOfLengthBetween(0, 6), mock(Object.class)), + Map.of(), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(), + Map.of(randomAlphaOfLengthBetween(0, 6), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(false)); + updateProfileDataRequest = randomBoolean() + ? 
new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of( + "kibana-" + randomAlphaOfLengthBetween(0, 4), + mock(Object.class), + randomAlphaOfLengthBetween(0, 6), + mock(Object.class) + ), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of( + "kibana-" + randomAlphaOfLengthBetween(0, 4), + mock(Object.class), + randomAlphaOfLengthBetween(0, 6), + mock(Object.class) + ), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(false)); // Everything else assertThat(kibanaRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -1535,6 +1613,13 @@ public void testSuperuserRole() { assertThat(superuserRole.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); assertThat(superuserRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(true)); assertThat(superuserRole.cluster().check("internal:admin/foo", request, authentication), is(false)); + assertThat( + superuserRole.cluster().check(UpdateProfileDataAction.NAME, mock(UpdateProfileDataRequest.class), authentication), + is(true) + ); + assertThat(superuserRole.cluster().check(GetProfileAction.NAME, mock(UpdateProfileDataRequest.class), authentication), is(true)); + assertThat(superuserRole.cluster().check(SearchProfilesAction.NAME, mock(SearchProfilesRequest.class), authentication), is(true)); + assertThat(superuserRole.cluster().check(ActivateProfileAction.NAME, mock(ActivateProfileRequest.class), authentication), is(true)); final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); final String internalSecurityIndex = randomFrom( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java new file mode 100644 index 0000000000000..48704068e3d8f --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +public class GetCheckpointActionRequestTests extends AbstractWireSerializingTestCase<Request> { + + @Override + protected Request createTestInstance() { + return new Request( + randomBoolean() ? 
null : generateRandomStringArray(10, 10, false, false), + IndicesOptions.fromParameters( + randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + SearchRequest.DEFAULT_INDICES_OPTIONS + ) + ); + } + + @Override + protected Reader<Request> instanceReader() { + return Request::new; + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + List<String> indices = instance.indices() != null ? new ArrayList<>(Arrays.asList(instance.indices())) : new ArrayList<>(); + IndicesOptions indicesOptions = instance.indicesOptions(); + + switch (between(0, 1)) { + case 0: + indices.add(randomAlphaOfLengthBetween(1, 20)); + break; + case 1: + indicesOptions = IndicesOptions.fromParameters( + randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), + Boolean.toString(instance.indicesOptions().ignoreUnavailable() == false), + Boolean.toString(instance.indicesOptions().allowNoIndices() == false), + Boolean.toString(instance.indicesOptions().ignoreThrottled() == false), + SearchRequest.DEFAULT_INDICES_OPTIONS + ); + break; + default: + throw new AssertionError("Illegal randomization branch"); + } + + return new Request(indices.toArray(new String[0]), indicesOptions); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java new file mode 100644 index 0000000000000..fdb7a59a4792e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +public class GetCheckpointActionResponseTests extends AbstractWireSerializingTestCase<Response> { + + public static Response randomCheckpointResponse() { + Map<String, long[]> checkpointsByIndex = new TreeMap<>(); + int indices = randomIntBetween(1, 10); + for (int i = 0; i < indices; ++i) { + List<Long> checkpoints = new ArrayList<>(); + int shards = randomIntBetween(1, 20); + for (int j = 0; j < shards; ++j) { + checkpoints.add(randomLongBetween(0, 1_000_000)); + } + checkpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), checkpoints.stream().mapToLong(l -> l).toArray()); + } + return new Response(checkpointsByIndex); + } + + @Override + protected Reader<Response> instanceReader() { + return Response::new; + } + + @Override + protected Response createTestInstance() { + return randomCheckpointResponse(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java new file mode 100644 index 0000000000000..fd3573f1acae8 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +public class GetCheckpointNodeActionRequestTests extends AbstractWireSerializingTestCase<Request> { + + @Override + protected Reader<Request> instanceReader() { + return Request::new; + } + + @Override + protected Request createTestInstance() { + Set<ShardId> shards = new HashSet<>(); + OriginalIndices originalIndices = randomOriginalIndices(randomIntBetween(0, 20)); + int numberOfRandomShardIds = randomInt(10); + + for (int i = 0; i < numberOfRandomShardIds; ++i) { + shards.add(new ShardId(randomAlphaOfLength(4) + i, randomAlphaOfLength(4), randomInt(5))); + } + + return new Request(shards, originalIndices); + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + + switch (random().nextInt(2)) { + case 0 -> { + Set<ShardId> shards = new HashSet<>(instance.getShards()); + if (randomBoolean() && shards.size() > 0) { + ShardId firstShard = shards.iterator().next(); + shards.remove(firstShard); + if (randomBoolean()) { + shards.add(new ShardId(randomAlphaOfLength(8), randomAlphaOfLength(4), randomInt(5))); + } + } else { + shards.add(new ShardId(randomAlphaOfLength(8), randomAlphaOfLength(4), randomInt(5))); + } + return new Request(shards, instance.getOriginalIndices()); + } + case 1 -> { + OriginalIndices originalIndices = randomOriginalIndices(instance.indices().length + 1); + return new Request(instance.getShards(), originalIndices); + } + default -> throw new IllegalStateException("The test should only allow 1 parameters mutated"); + } + } + + private OriginalIndices randomOriginalIndices(int numIndices) { + String[] randomIndices = new String[numIndices]; + for (int i = 0; i < numIndices; i++) { + randomIndices[i] = randomAlphaOfLengthBetween(5, 10); + } + IndicesOptions indicesOptions = randomBoolean() ? IndicesOptions.strictExpand() : IndicesOptions.lenientExpandOpen(); + return new OriginalIndices(randomIndices, indicesOptions); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java new file mode 100644 index 0000000000000..f189a4f0faae2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Response; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class GetCheckpointNodeActionResponseTests extends AbstractWireSerializingTestCase<Response> { + + public static Response randomNodeCheckpointResponse() { + Map<String, long[]> checkpointsByIndex = new HashMap<>(); + int indices = randomIntBetween(1, 10); + for (int i = 0; i < indices; ++i) { + List<Long> checkpoints = new ArrayList<>(); + int shards = randomIntBetween(1, 20); + for (int j = 0; j < shards; ++j) { + checkpoints.add(randomLongBetween(0, 1_000_000)); + } + checkpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), checkpoints.stream().mapToLong(l -> l).toArray()); + } + return new Response(checkpointsByIndex); + } + + @Override + protected Reader<Response> instanceReader() { + return Response::new; + } + + @Override + protected Response createTestInstance() { + return randomNodeCheckpointResponse(); + } +} diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 3aada49d33ac5..2e58bb8f673b5 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -7,28 +7,17 @@ package org.elasticsearch.xpack.deprecation; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.WarningsHandler; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.client.ml.job.config.DataDescription; -import org.elasticsearch.client.ml.job.config.Detector; -import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentType; import org.junit.After; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -44,12 +33,6 @@ public class MlDeprecationIT extends ESRestTestCase { .setWarningsHandler(WarningsHandler.PERMISSIVE) .build(); - private static class HLRC extends RestHighLevelClient { - HLRC(RestClient restClient) { - super(restClient, RestClient::close, new ArrayList<>()); - } - } - @After public void resetFeatures() throws IOException { Response response = adminClient().performRequest(new Request("POST", "/_features/_reset")); @@ -69,32 +52,21 @@ protected boolean enableWarningsCheck() { @SuppressWarnings("unchecked") public void testMlDeprecationChecks() throws Exception { - HLRC hlrc = new HLRC(client()); String jobId = "deprecation_check_job"; - hlrc.machineLearning() - 
.putJob( - new PutJobRequest( - Job.builder(jobId) - .setAnalysisConfig( - AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())) - ) - .setDataDescription(new DataDescription.Builder().setTimeField("time")) - .build() - ), - REQUEST_OPTIONS - ); - - IndexRequest indexRequest = new IndexRequest(".ml-anomalies-.write-" + jobId).id(jobId + "_model_snapshot_1") - .source("{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"1\", \"snapshot_doc_count\":1}", XContentType.JSON); - hlrc.index(indexRequest, REQUEST_OPTIONS); - - indexRequest = new IndexRequest(".ml-anomalies-.write-" + jobId).id(jobId + "_model_snapshot_2") - .source( - "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}", - XContentType.JSON - ); - hlrc.index(indexRequest, REQUEST_OPTIONS); - hlrc.indices().refresh(new RefreshRequest(".ml-anomalies-*"), REQUEST_OPTIONS); + buildAndPutJob(jobId); + + indexDoc( + ".ml-anomalies-.write-" + jobId, + jobId + "_model_snapshot_1", + "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"1\", \"snapshot_doc_count\":1}" + ); + + indexDoc( + ".ml-anomalies-.write-" + jobId, + jobId + "_model_snapshot_2", + "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}" + ); + client().performRequest(new Request("POST", "/.ml-anomalies-*/_refresh")); // specify an index so that deprecation checks don't run against any accidentally existing indices Request getDeprecations = new Request("GET", "/does-not-exist-*/_migration/deprecations"); @@ -108,4 +80,30 @@ public void testMlDeprecationChecks() throws Exception { assertThat(mlSettingsDeprecations.get(0).get("_meta"), equalTo(Map.of("job_id", jobId, "snapshot_id", "1"))); } + private Response buildAndPutJob(String jobId) throws Exception { + String jobConfig = """ + { + "analysis_config" : { + "bucket_span": "3600s", + "detectors" :[{"function":"count"}] + }, + "data_description" : { + "time_field":"time", + "time_format":"yyyy-MM-dd HH:mm:ssX" + } + }"""; + + Request request = new Request("PUT", "/_ml/anomaly_detectors/" + jobId); + request.setOptions(REQUEST_OPTIONS); + request.setJsonEntity(jobConfig); + return client().performRequest(request); + } + + private Response indexDoc(String index, String docId, String source) throws IOException { + Request request = new Request("PUT", "/" + index + "/_doc/" + docId); + request.setOptions(REQUEST_OPTIONS); + request.setJsonEntity(source); + return client().performRequest(request); + } + } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index b0869c2b41eeb..28548d71932fc 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -82,7 +82,8 @@ private DeprecationChecks() {} NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, - NodeDeprecationChecks::checkEqlEnabledSetting + NodeDeprecationChecks::checkEqlEnabledSetting, + NodeDeprecationChecks::checkNodeAttrData ); static List<Function<IndexMetadata, DeprecationIssue>> INDEX_SETTINGS_CHECKS = List.of( diff --git 
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index ab3230cd0baaf..40be68851a765 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -616,4 +616,20 @@ static DeprecationIssue checkEqlEnabledSetting(final Settings settings, final Pl ); } + static DeprecationIssue checkNodeAttrData(final Settings settings, final PluginsAndModules pluginsAndModules) { + String nodeAttrDataValue = settings.get("node.attr.data"); + if (nodeAttrDataValue == null) { + return null; + } + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Setting node.attributes.data is not recommended", + "https://ela.st/es-deprecation-7-node-attr-data-setting", + "One or more of your nodes is configured with node.attributes.data settings. This is typically used to create a " + + "hot/warm or tiered architecture, based on legacy guidelines. Data tiers are a recommended replacement for tiered " + + "architecture clusters.", + false, + null + ); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 3a004dc4da692..124d2f19ee62b 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -675,4 +675,21 @@ public void testEqlEnabledSetting() { ) ); } + + public void testCheckNodeAttrData() { + Settings settings = Settings.builder().put("node.attr.data", randomAlphaOfLength(randomIntBetween(4, 20))).build(); + final PluginsAndModules pluginsAndModules = new PluginsAndModules(Collections.emptyList(), Collections.emptyList()); + final List<DeprecationIssue> issues = getDeprecationIssues(settings, pluginsAndModules); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Setting node.attributes.data is not recommended", + "https://ela.st/es-deprecation-7-node-attr-data-setting", + "One or more of your nodes is configured with node.attributes.data settings. This is typically used to create a " + + "hot/warm or tiered architecture, based on legacy guidelines. 
Data tiers are a recommended replacement for tiered " + + "architecture clusters.", + false, + null + ); + assertThat(issues, hasItem(expected)); + } } diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index 681a0acf71483..27f88dc65364e 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -15,13 +15,11 @@ dependencies { } File serviceAccountFile = providers.environmentVariable('eql_test_credentials_file') - .forUseAtConfigurationTime() .orElse(providers.systemProperty('eql.test.credentials.file').forUseAtConfigurationTime()) .map { s -> new File(s)} .getOrNull() Boolean preserveData = providers.systemProperty('eql.test.preserve.data') - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) } .getOrElse(false) diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml index 866b9dfda214c..e35282bb6bfde 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml @@ -20,7 +20,7 @@ setup: body: - index: _index: eql_test - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z @@ -30,7 +30,7 @@ setup: some_keyword: longer than normal - index: _index: eql_test - _id: 2 + _id: "2" - event: - category: process "@timestamp": 2020-02-04T12:34:56Z @@ -39,7 +39,7 @@ setup: valid: true - index: _index: eql_test - _id: 3 + _id: "3" - event: - category: process "@timestamp": 2020-02-05T12:34:56Z @@ -48,7 +48,7 @@ setup: valid: true - index: _index: eql_test - _id: 4 + _id: "4" - event: - category: network "@timestamp": 2020-02-06T12:34:56Z @@ -57,7 +57,7 @@ setup: valid: true - index: _index: eql_test - _id: 5 + _id: "5" - event: - category: network "@timestamp": 2020-02-07T12:34:56Z @@ -66,7 +66,7 @@ setup: valid: true - index: _index: eql_test - _id: 6 + _id: "6" - event: - category: network "@timestamp": 2020-02-08T12:34:56Z @@ -75,7 +75,7 @@ setup: valid: true - index: _index: eql_test - _id: 7 + _id: "7" - event: - category: network "@timestamp": 2020-02-09T12:34:56Z diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml index e5cbebbb7af76..292f69e3d6bef 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml @@ -16,7 +16,7 @@ setup: body: - index: _index: eql_test - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z @@ -26,7 +26,7 @@ setup: raw_message: "199.72.81.55 - - [01/Jul/1995:00:00:01 -0400] GET /history/apollo/ HTTP/1.0 200 6245" - index: _index: eql_test - _id: 2 + _id: "2" - event: - category: process "@timestamp": 2020-02-04T12:34:56Z @@ -36,7 +36,7 @@ setup: raw_message: "199.72.81.123 - - [01/Jul/1995:00:00:02 -0400] GET /history/apollo/a HTTP/1.0 200 500" - index: _index: eql_test - _id: 3 + _id: "3" - event: - category: process "@timestamp": 2020-02-05T12:34:56Z @@ -46,7 +46,7 @@ setup: raw_message: "199.72.81.34 - - [01/Jul/1995:00:00:03 -0400] GET /history/apollo/b HTTP/1.0 200 1500" - index: _index: eql_test - 
_id: 4 + _id: "4" - event: - category: process "@timestamp": 2020-02-05T12:34:57Z @@ -88,7 +88,7 @@ setup: query: 'process where true' fields: ["address"] runtime_mappings: {"address": {"type": "ip","script": "if (doc[\"raw_message.keyword\"].size() == 0) return; else {Matcher m = /\\d+\\.\\d+\\.\\d+\\.\\d+/.matcher(doc[\"raw_message.keyword\"].value);if (m.find()) emit(m.group());}"}} - + - match: {hits.events.0._id: "1"} - match: {hits.events.0.fields.address: ["199.72.81.55"]} - match: {hits.events.0._source.raw_message: "199.72.81.55 - - [01/Jul/1995:00:00:01 -0400] GET /history/apollo/ HTTP/1.0 200 6245"} @@ -165,7 +165,7 @@ setup: - match: {hits.sequences.1.events.2._id: "4"} - match: {hits.sequences.1.events.2._source.@timestamp: "2020-02-05T12:34:57Z"} - match: {hits.sequences.1.events.2.fields.day_of_week: [3]} - + --- "Validate valid runtime mappings request": - do: diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java index 2b702044ce92a..690625887d445 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java @@ -147,7 +147,7 @@ private void toggleFrozenSettings( .masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); - indexStateService.openIndex( + indexStateService.openIndices( updateRequest, delegate.delegateFailure( (l, openIndexClusterStateUpdateResponse) -> l.onResponse( diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java index 568a68181f1e2..5e4e4afb1a60b 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java @@ -6,18 +6,17 @@ */ package org.elasticsearch.xpack.idp; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.PutPrivilegesRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.ApplicationPrivilege; import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges; import org.elasticsearch.client.security.user.privileges.IndicesPrivileges; @@ -29,6 +28,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ObjectPath; import 
org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.idp.saml.sp.SamlServiceProviderIndex; import java.io.IOException; @@ -67,18 +67,40 @@ private RestHighLevelClient getHighLevelAdminClient() { return highLevelAdminClient; } - protected User createUser(String username, SecureString password, String... roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - final User user = new User(username, List.of(roles), Map.of(), username + " in " + getTestName(), username + "@test.example.com"); - final PutUserRequest request = PutUserRequest.withPassword(user, password.getChars(), true, RefreshPolicy.IMMEDIATE); - client.security().putUser(request, RequestOptions.DEFAULT); + protected User createUser(String username, SecureString password, String role) throws IOException { + final User user = new User( + username, + new String[] { role }, + username + " in " + getTestName(), + username + "@test.example.com", + Map.of(), + true + ); + final String endpoint = "/_security/user/" + username; + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + final String body = """ + { + "username": "%s", + "full_name": "%s", + "email": "%s", + "password": "%s", + "roles": [ "%s" ] + } + """.formatted(user.principal(), user.fullName(), user.email(), password.toString(), role); + request.setJsonEntity(body); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); + return user; } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - final DeleteUserRequest request = new DeleteUserRequest(username, RefreshPolicy.WAIT_UNTIL); - client.security().deleteUser(request, RequestOptions.DEFAULT); + final String endpoint = "/_security/user/" + username; + final Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); } protected void createRole( @@ -114,9 +136,16 @@ protected void createApplicationPrivileges(String applicationName, Map body) throws IOException { diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java index a7d8c9c798a08..0a7d628a7cc3c 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java @@ -8,13 +8,13 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.Before; import java.io.IOException; @@ 
-78,9 +78,9 @@ public void testInitSingleSignOnToWildcardServiceProvider() throws Exception { assertThat(samlResponse, containsString("FriendlyName=\"" + attr + "\"")); } - assertThat(samlResponse, containsString(user.getUsername())); - assertThat(samlResponse, containsString(user.getEmail())); - assertThat(samlResponse, containsString(user.getFullName())); + assertThat(samlResponse, containsString(user.principal())); + assertThat(samlResponse, containsString(user.email())); + assertThat(samlResponse, containsString(user.fullName())); assertThat(samlResponse, containsString(">admin<")); deleteUser(username); diff --git a/x-pack/plugin/ilm/build.gradle b/x-pack/plugin/ilm/build.gradle index b3c64ed4cde8e..e03c22953021d 100644 --- a/x-pack/plugin/ilm/build.gradle +++ b/x-pack/plugin/ilm/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -20,5 +22,7 @@ dependencies { addQaCheckDependencies() tasks.named("test").configure { - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index 4139f24fa4983..f9e2bfb4c2913 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.info.BuildParams @@ -14,7 +15,9 @@ File repoDir = file("$buildDir/testclusters/repo") tasks.named("javaRestTest").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', repoDir - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } testClusters.configureEach { @@ -38,7 +41,7 @@ testClusters.configureEach { * cached time. So the policy's action date is always after the snapshot's start. 
*/ setting 'thread_pool.estimated_time_interval', '0' - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index b9b93e37cf67f..b2d943207f16c 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -26,9 +26,11 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexModule; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; @@ -107,6 +109,7 @@ import org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry; import org.elasticsearch.xpack.slm.SLMInfoTransportAction; import org.elasticsearch.xpack.slm.SLMUsageTransportAction; +import org.elasticsearch.xpack.slm.SlmHealthIndicatorService; import org.elasticsearch.xpack.slm.SnapshotLifecycleService; import org.elasticsearch.xpack.slm.SnapshotLifecycleTask; import org.elasticsearch.xpack.slm.SnapshotRetentionService; @@ -144,7 +147,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN; -public class IndexLifecycle extends Plugin implements ActionPlugin { +public class IndexLifecycle extends Plugin implements ActionPlugin, HealthPlugin { public static final List<NamedXContentRegistry.Entry> NAMED_X_CONTENT_ENTRIES = xContentEntries(); @@ -153,6 +156,8 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { private final SetOnce<SnapshotLifecycleService> snapshotLifecycleService = new SetOnce<>(); private final SetOnce<SnapshotRetentionService> snapshotRetentionService = new SetOnce<>(); private final SetOnce<SnapshotHistoryStore> snapshotHistoryStore = new SetOnce<>(); + private final SetOnce<IlmHealthIndicatorService> ilmHealthIndicatorService = new SetOnce<>(); + private final SetOnce<SlmHealthIndicatorService> slmHealthIndicatorService = new SetOnce<>(); private final Settings settings; public IndexLifecycle(Settings settings) { @@ -264,7 +269,8 @@ public Collection<Object> createComponents( ); snapshotRetentionService.get().init(clusterService); components.addAll(Arrays.asList(snapshotLifecycleService.get(), snapshotHistoryStore.get(), snapshotRetentionService.get())); - + ilmHealthIndicatorService.set(new IlmHealthIndicatorService(clusterService)); + slmHealthIndicatorService.set(new SlmHealthIndicatorService(clusterService)); return components; } @@ -414,6 +420,11 @@ public List<RestHandler> getRestHandlers( return actions; } + @Override + public Collection<HealthIndicatorService> getHealthIndicatorServices() { + return List.of(ilmHealthIndicatorService.get(), slmHealthIndicatorService.get()); + } + @Override public void onIndexModule(IndexModule indexModule) { assert indexLifecycleInitialisationService.get() != null; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java index a0b3391c0958e..c66618185245f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java +++ 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java @@ -61,13 +61,11 @@ class IndexLifecycleRunner { for (IndexLifecycleClusterStateUpdateTask task : tasks) { try { state = task.execute(state); - builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } } - // Trigger indices lookup creation and related validation - state.metadata().getIndicesLookup(); return builder.build(state); }; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java index bf81d3af7c02c..6c91c78d3644b 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java @@ -430,7 +430,6 @@ public static ClusterState.Builder newClusterStateWithLifecycleState( IndexMetadata.builder(clusterState.getMetadata().index(index)) .putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.asMap()) ) - .build(false) ); return newClusterStateBuilder; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java index a2d9e01ae1111..9d4076e2d4f95 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java @@ -96,7 +96,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // REST layer and the Transport layer here must be accessed within this thread and not in the // cluster state thread in the ClusterStateUpdateTask below since that thread does not share the // same context, and therefore does not have access to the appropriate security headers. - Map<String, String> filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map<String, String> filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), state); LifecyclePolicy.validatePolicyName(request.getPolicy().getName()); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java index 745c715de3d14..a2955fe118d91 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java @@ -80,7 +80,7 @@ protected void masterOperation( // REST layer and the Transport layer here must be accessed within this thread and not in the // cluster state thread in the ClusterStateUpdateTask below since that thread does not share the // same context, and therefore does not have access to the appropriate security headers. 
- final Map<String, String> filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + final Map<String, String> filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), state); LifecyclePolicy.validatePolicyName(request.getLifecycleId()); clusterService.submitStateUpdateTask( "put-snapshot-lifecycle-" + request.getLifecycleId(), diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index e301221112f46..eaff7fe362552 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams /* @@ -50,7 +51,5 @@ if (BuildParams.isSnapshotBuild() == false) { } testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e1fa36abe0451..153e3f63dc71a 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -20,7 +20,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; @@ -30,10 +29,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SimpleMappedFieldType; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.TextSearchInfo; -import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.index.mapper.TimeSeriesParams.MetricType; import org.elasticsearch.index.mapper.ValueFetcher; @@ -554,12 +553,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (dimension && numericValue != null) { - // We encode the tsid part of the dimension field. However, there is no point - // in encoding the tsid value if we do not generate the _tsid field. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? 
TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(numericValue) - : null; - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addUnsignedLong(fieldType().name(), numericValue); } List<Field> fields = new ArrayList<>(); @@ -655,4 +649,12 @@ protected static long sortableSignedLongToUnsigned(long value) { return value ^ MASK_2_63; } + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 68254206ec675..495e4c1097260 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -16,7 +16,7 @@ repositories { forRepository { ivy { name "ml-cpp" - url providers.systemProperty('build.ml_cpp.repo').forUseAtConfigurationTime().orElse('https://prelert-artifacts.s3.amazonaws.com').get() + url providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() metadataSources { // no repository metadata, look directly for the artifact artifact() @@ -72,6 +72,8 @@ dependencies { // ml deps api project(':libs:elasticsearch-grok') api "org.apache.commons:commons-math3:3.6.1" + api "com.ibm.icu:icu4j:${versions.icu4j}" + api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}@zip") { changing = true } @@ -102,4 +104,8 @@ project.afterEvaluate { } } +tasks.named("dependencyLicenses").configure { + mapping from: /lucene-.*/, to: 'lucene' +} + addQaCheckDependencies() diff --git a/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 b/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 new file mode 100644 index 0000000000000..fcb3d79075099 --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 @@ -0,0 +1 @@ +76893e6000401ace133a65262254be0ebe556d46 \ No newline at end of file diff --git a/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt b/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt new file mode 100644 index 0000000000000..e76faec4ad20f --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt @@ -0,0 +1,33 @@ +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +provided that the above copyright notice(s) and this permission notice appear +in all copies of the Software and that both the above copyright notice(s) and +this permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE +LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not +be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property of +their respective owners. diff --git a/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt b/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt new file mode 100644 index 0000000000000..47eeab14f2ef6 --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt @@ -0,0 +1,3 @@ +ICU4J, (under lucene/analysis/icu) is licensed under an MIT style license +(modules/analysis/icu/lib/icu4j-LICENSE-BSD_LIKE.txt) and Copyright (c) 1995-2012 +International Business Machines Corporation and others \ No newline at end of file diff --git a/x-pack/plugin/ml/licenses/lucene-LICENSE.txt b/x-pack/plugin/ml/licenses/lucene-LICENSE.txt new file mode 100644 index 0000000000000..28b134f5f8e4d --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-LICENSE.txt @@ -0,0 +1,475 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from unicode conversion examples available at +http://www.unicode.org/Public/PROGRAMS/CVTUTF. Here is the copyright +from those sources: + +/* + * Copyright 2001-2004 Unicode, Inc. + * + * Disclaimer + * + * This source code is provided as is by Unicode, Inc. No claims are + * made as to fitness for any particular purpose. No warranties of any + * kind are expressed or implied. The recipient agrees to determine + * applicability of information provided. If this file has been + * purchased on magnetic or optical media from Unicode, Inc., the + * sole remedy for any claim will be exchange of defective media + * within 90 days of receipt. + * + * Limitations on Rights to Redistribute This Code + * + * Unicode, Inc. 
hereby grants the right to freely use the information + * supplied in this file in the creation of products supporting the + * Unicode Standard, and to make copies of this file in any form + * for internal or external distribution as long as this notice + * remains attached. + */ + + +Some code in core/src/java/org/apache/lucene/util/ArrayUtil.java was +derived from Python 2.4.2 sources available at +http://www.python.org. Full license is here: + + http://www.python.org/download/releases/2.4.2/license/ + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from Python 3.1.2 sources available at +http://www.python.org. Full license is here: + + http://www.python.org/download/releases/3.1.2/license/ + +Some code in core/src/java/org/apache/lucene/util/automaton was +derived from Brics automaton sources available at +www.brics.dk/automaton/. Here is the copyright from those sources: + +/* + * Copyright (c) 2001-2009 Anders Moeller + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +The levenshtein automata tables in core/src/java/org/apache/lucene/util/automaton +were automatically generated with the moman/finenight FSA package. +Here is the copyright for those sources: + +# Copyright (c) 2010, Jean-Philippe Barrette-LaPierre, +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from ICU (http://www.icu-project.org) +The full license is available here: + http://source.icu-project.org/repos/icu/icu/trunk/license.html + +/* + * Copyright (C) 1999-2010, International Business Machines + * Corporation and others. All Rights Reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, and/or sell copies of the + * Software, and to permit persons to whom the Software is furnished to do so, + * provided that the above copyright notice(s) and this permission notice appear + * in all copies of the Software and that both the above copyright notice(s) and + * this permission notice appear in supporting documentation. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE + * LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR + * ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER + * IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT + * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Except as contained in this notice, the name of a copyright holder shall not + * be used in advertising or otherwise to promote the sale, use or other + * dealings in this Software without prior written authorization of the + * copyright holder. + */ + +The following license applies to the Snowball stemmers: + +Copyright (c) 2001, Dr Martin Porter +Copyright (c) 2002, Richard Boulton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The following license applies to the KStemmer: + +Copyright © 2003, +Center for Intelligent Information Retrieval, +University of Massachusetts, Amherst. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +3. The names "Center for Intelligent Information Retrieval" and +"University of Massachusetts" must not be used to endorse or promote products +derived from this software without prior written permission. To obtain +permission, contact info@ciir.cs.umass.edu. + +THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +The following license applies to the Morfologik project: + +Copyright (c) 2006 Dawid Weiss +Copyright (c) 2007-2011 Dawid Weiss, Marcin Miłkowski +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of Morfologik nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--- + +The dictionary comes from Morfologik project. Morfologik uses data from +Polish ispell/myspell dictionary hosted at http://www.sjp.pl/slownik/en/ and +is licenced on the terms of (inter alia) LGPL and Creative Commons +ShareAlike. The part-of-speech tags were added in Morfologik project and +are not found in the data from sjp.pl. The tagset is similar to IPI PAN +tagset. + +--- + +The following license applies to the Morfeusz project, +used by org.apache.lucene.analysis.morfologik. + +BSD-licensed dictionary of Polish (SGJP) +http://sgjp.pl/morfeusz/ + +Copyright © 2011 Zygmunt Saloni, Włodzimierz Gruszczyński, + Marcin Woliński, Robert Wołosz + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS “AS IS” AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/x-pack/plugin/ml/licenses/lucene-NOTICE.txt b/x-pack/plugin/ml/licenses/lucene-NOTICE.txt new file mode 100644 index 0000000000000..1a1d51572432a --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-NOTICE.txt @@ -0,0 +1,192 @@ +Apache Lucene +Copyright 2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Includes software from other Apache Software Foundation projects, +including, but not limited to: + - Apache Ant + - Apache Jakarta Regexp + - Apache Commons + - Apache Xerces + +ICU4J, (under analysis/icu) is licensed under an MIT styles license +and Copyright (c) 1995-2008 International Business Machines Corporation and others + +Some data files (under analysis/icu/src/data) are derived from Unicode data such +as the Unicode Character Database. See http://unicode.org/copyright.html for more +details. + +Brics Automaton (under core/src/java/org/apache/lucene/util/automaton) is +BSD-licensed, created by Anders Møller. 
See http://www.brics.dk/automaton/ + +The levenshtein automata tables (under core/src/java/org/apache/lucene/util/automaton) were +automatically generated with the moman/finenight FSA library, created by +Jean-Philippe Barrette-LaPierre. This library is available under an MIT license, +see http://sites.google.com/site/rrettesite/moman and +http://bitbucket.org/jpbarrette/moman/overview/ + +The class org.apache.lucene.util.WeakIdentityMap was derived from +the Apache CXF project and is Apache License 2.0. + +The Google Code Prettify is Apache License 2.0. +See http://code.google.com/p/google-code-prettify/ + +JUnit (junit-4.10) is licensed under the Common Public License v. 1.0 +See http://junit.sourceforge.net/cpl-v10.html + +This product includes code (JaspellTernarySearchTrie) from Java Spelling Checkin +g Package (jaspell): http://jaspell.sourceforge.net/ +License: The BSD License (http://www.opensource.org/licenses/bsd-license.php) + +The snowball stemmers in + analysis/common/src/java/net/sf/snowball +were developed by Martin Porter and Richard Boulton. +The snowball stopword lists in + analysis/common/src/resources/org/apache/lucene/analysis/snowball +were developed by Martin Porter and Richard Boulton. +The full snowball package is available from + http://snowball.tartarus.org/ + +The KStem stemmer in + analysis/common/src/org/apache/lucene/analysis/en +was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) +under the BSD-license. + +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default +stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: +analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt +See http://members.unine.ch/jacques.savoy/clef/index.html. + +The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers +(common) are based on BSD-licensed reference implementations created by Jacques Savoy and +Ljiljana Dolamic. These files reside in: +analysis/common/src/java/org/apache/lucene/analysis/de/GermanLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/de/GermanMinimalStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/es/SpanishLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchMinimalStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/it/ItalianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/ru/RussianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishLightStemmer.java + +The Stempel analyzer (stempel) includes BSD-licensed software developed +by the Egothor project http://egothor.sf.net/, created by Leo Galambos, Martin Kvapil, +and Edmond Nolan. 
+ +The Polish analyzer (stempel) comes with a default +stopword list that is BSD-licensed created by the Carrot2 project. The file resides +in stempel/src/resources/org/apache/lucene/analysis/pl/stopwords.txt. +See http://project.carrot2.org/license.html. + +The SmartChineseAnalyzer source code (smartcn) was +provided by Xiaoping Gao and copyright 2009 by www.imdict.net. + +WordBreakTestUnicode_*.java (under modules/analysis/common/src/test/) +is derived from Unicode data such as the Unicode Character Database. +See http://unicode.org/copyright.html for more details. + +The Morfologik analyzer (morfologik) includes BSD-licensed software +developed by Dawid Weiss and Marcin Miłkowski (http://morfologik.blogspot.com/). + +Morfologik uses data from Polish ispell/myspell dictionary +(http://www.sjp.pl/slownik/en/) licenced on the terms of (inter alia) +LGPL and Creative Commons ShareAlike. + +Morfologic includes data from BSD-licensed dictionary of Polish (SGJP) +(http://sgjp.pl/morfeusz/) + +Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original +source code for this can be found at http://www.eclipse.org/jetty/downloads.php + +=========================================================================== +Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration +=========================================================================== + +This software includes a binary and/or source version of data from + + mecab-ipadic-2.7.0-20070801 + +which can be obtained from + + http://atilika.com/releases/mecab-ipadic/mecab-ipadic-2.7.0-20070801.tar.gz + +or + + http://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/mecab-ipadic-2.7.0-20070801.tar.gz + +=========================================================================== +mecab-ipadic-2.7.0-20070801 Notice +=========================================================================== + +Nara Institute of Science and Technology (NAIST), +the copyright holders, disclaims all warranties with regard to this +software, including all implied warranties of merchantability and +fitness, in no event shall NAIST be liable for +any special, indirect or consequential damages or any damages +whatsoever resulting from loss of use, data or profits, whether in an +action of contract, negligence or other tortuous action, arising out +of or in connection with the use or performance of this software. + +A large portion of the dictionary entries +originate from ICOT Free Software. The following conditions for ICOT +Free Software applies to the current dictionary as well. + +Each User may also freely distribute the Program, whether in its +original form or modified, to any third party or parties, PROVIDED +that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear +on, or be attached to, the Program, which is distributed substantially +in the same form as set out herein and that such intended +distribution, if actually made, will neither violate or otherwise +contravene any of the laws and regulations of the countries having +jurisdiction over the User or the intended distribution itself. + +NO WARRANTY + +The program was produced on an experimental basis in the course of the +research and development conducted during the project and is provided +to users as so produced on an experimental basis. Accordingly, the +program is provided without any warranty whatsoever, whether express, +implied, statutory or otherwise. 
The term "warranty" used herein +includes, but is not limited to, any warranty of the quality, +performance, merchantability and fitness for a particular purpose of +the program and the nonexistence of any infringement or violation of +any right of any third party. + +Each user of the program will agree and understand, and be deemed to +have agreed and understood, that there is no warranty whatsoever for +the program and, accordingly, the entire risk arising from or +otherwise connected with the program is assumed by the user. + +Therefore, neither ICOT, the copyright holder, or any other +organization that participated in or was otherwise related to the +development of the program and their respective officials, directors, +officers and other employees shall be held liable for any and all +damages, including, without limitation, general, special, incidental +and consequential damages, arising out of or otherwise in connection +with the use or inability to use the program or any product, material +or result produced or otherwise obtained by using the program, +regardless of whether they have been advised of, or otherwise had +knowledge of, the possibility of such damages at any time during the +project or thereafter. Each user will be deemed to have agreed to the +foregoing by his or her commencement of use of the program. The term +"use" as used herein includes, but is not limited to, the use, +modification, copying and distribution of the program and the +production of secondary products from the program. + +In the case where the program, whether in its original form or +modified, was distributed or delivered to or received by a user from +any person, organization or entity other than ICOT, unless it makes or +grants independently of ICOT any specific warranty to the user in +writing, such person, organization or entity, will also be exempted +from and not be held liable to the user for any such damages as noted +above as far as the program is concerned. 
diff --git a/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 b/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a0df1a4b7cb2e --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a23a2c1c9baad61b6fb5380f072e41534c275875 \ No newline at end of file diff --git a/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java index 4f874713606ff..0f594e47c4a76 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java +++ b/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java @@ -28,7 +28,6 @@ public class MlWithSecurityUserRoleIT extends MlWithSecurityIT { */ private static final List ALLOWED_ACTION_PATTERNS = Arrays.asList( Pattern.compile("ml\\.get_.*"), - Pattern.compile("ml\\.find_file_structure"), Pattern.compile("ml\\.evaluate_data_frame") ); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java index a7ac49d564130..f3d46d2ea1f61 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java @@ -10,12 +10,11 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; -import org.elasticsearch.client.ml.inference.TrainedModelStats; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ExternalTestCluster; @@ -24,9 +23,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinitionTests; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; @@ -36,6 +33,7 @@ import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -124,8 +122,8 @@ public void testPathologicalPipelineCreationAndDeletion() throws Exception { assertThat(EntityUtils.toString(searchResponse.getEntity()), containsString("\"value\":10")); assertBusy(() -> { try { - assertStatsWithCacheMisses(classificationModelId, 10L); - assertStatsWithCacheMisses(regressionModelId, 
10L); + assertStatsWithCacheMisses(classificationModelId, 10); + assertStatsWithCacheMisses(regressionModelId, 10); } catch (ResponseException ex) { // this could just mean shard failures. fail(ex.getMessage()); @@ -176,8 +174,8 @@ public void testPipelineIngest() throws Exception { assertBusy(() -> { try { - assertStatsWithCacheMisses(classificationModelId, 10L); - assertStatsWithCacheMisses(regressionModelId, 15L); + assertStatsWithCacheMisses(classificationModelId, 10); + assertStatsWithCacheMisses(regressionModelId, 15); } catch (ResponseException ex) { // this could just mean shard failures. fail(ex.getMessage()); @@ -185,6 +183,7 @@ }, 30, TimeUnit.SECONDS); } + @SuppressWarnings("unchecked") public void testPipelineIngestWithModelAliases() throws Exception { String regressionModelId = "test_regression_1"; putModel(regressionModelId, REGRESSION_CONFIG); @@ -255,17 +254,13 @@ public void testPipelineIngestWithModelAliases() throws Exception { assertThat(EntityUtils.toString(searchResponse.getEntity()), not(containsString("\"value\":0"))); assertBusy(() -> { - try ( - XContentParser parser = createParser( - JsonXContent.jsonXContent, - client().performRequest(new Request("GET", "_ml/trained_models/" + modelAlias + "/_stats")).getEntity().getContent() - ) - ) { - GetTrainedModelsStatsResponse response = GetTrainedModelsStatsResponse.fromXContent(parser); - assertThat(response.toString(), response.getTrainedModelStats(), hasSize(1)); - TrainedModelStats trainedModelStats = response.getTrainedModelStats().get(0); - assertThat(trainedModelStats.getModelId(), equalTo(regressionModelId2)); - assertThat(trainedModelStats.getInferenceStats(), is(notNullValue())); + try { + Response response = client().performRequest(new Request("GET", "_ml/trained_models/" + modelAlias + "/_stats")); + var responseMap = entityAsMap(response); + assertThat((List<?>) responseMap.get("trained_model_stats"), hasSize(1)); + var stats = ((List<Map<String, Object>>) responseMap.get("trained_model_stats")).get(0); + assertThat(stats.get("model_id"), equalTo(regressionModelId2)); + assertThat(stats.get("inference_stats"), is(notNullValue())); } catch (ResponseException ex) { // this could just mean shard failures.
fail(ex.getMessage()); @@ -273,16 +268,19 @@ public void testPipelineIngestWithModelAliases() throws Exception { }); } - public void assertStatsWithCacheMisses(String modelId, long inferenceCount) throws IOException { + @SuppressWarnings("unchecked") + public void assertStatsWithCacheMisses(String modelId, int inferenceCount) throws IOException { Response statsResponse = client().performRequest(new Request("GET", "_ml/trained_models/" + modelId + "/_stats")); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, statsResponse.getEntity().getContent())) { - GetTrainedModelsStatsResponse response = GetTrainedModelsStatsResponse.fromXContent(parser); - assertThat(response.getTrainedModelStats(), hasSize(1)); - TrainedModelStats trainedModelStats = response.getTrainedModelStats().get(0); - assertThat(trainedModelStats.getInferenceStats(), is(notNullValue())); - assertThat(trainedModelStats.getInferenceStats().getInferenceCount(), equalTo(inferenceCount)); - assertThat(trainedModelStats.getInferenceStats().getCacheMissCount(), greaterThan(0L)); - } + var responseMap = entityAsMap(statsResponse); + assertThat((List<?>) responseMap.get("trained_model_stats"), hasSize(1)); + var stats = ((List<Map<String, Object>>) responseMap.get("trained_model_stats")).get(0); + assertThat(stats.get("inference_stats"), is(notNullValue())); + assertThat( + stats.toString(), + (Integer) XContentMapValues.extractValue("inference_stats.inference_count", stats), + equalTo(inferenceCount) + ); + assertThat(stats.toString(), (Integer) XContentMapValues.extractValue("inference_stats.cache_miss_count", stats), greaterThan(0)); } public void testSimulate() throws IOException { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java new file mode 100644 index 0000000000000..14c6e7a770ed3 --- /dev/null +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; +import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; +import org.elasticsearch.xpack.core.ml.action.OpenJobAction; +import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; +import org.elasticsearch.xpack.core.ml.inference.allocation.AllocationStatus; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.PassThroughConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; +import org.junit.After; + +import java.util.Base64; +import java.util.List; + +import static org.elasticsearch.xpack.ml.integration.ClassificationIT.KEYWORD_FIELD; +import static org.elasticsearch.xpack.ml.integration.PyTorchModelIT.BASE_64_ENCODED_MODEL; +import static org.elasticsearch.xpack.ml.integration.PyTorchModelIT.RAW_MODEL_SIZE; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; + +public class MlMemoryIT extends MlNativeDataFrameAnalyticsIntegTestCase { + + @After + public void cleanUpAfterTest() { + cleanUp(); + } + + public void testMemoryStats() throws Exception { + + deployTrainedModel(); + openAnomalyDetectionJob(); + String dfaJobId = "dfa"; + startDataFrameAnalyticsJob(dfaJobId); + + MlMemoryAction.Response response = client().execute(MlMemoryAction.INSTANCE, new MlMemoryAction.Request("_all")).actionGet(); + + assertThat(response.failures(), empty()); + + List statsList = response.getNodes(); + // There are 4 nodes: 3 in the external cluster plus the test harness + assertThat(statsList, hasSize(4)); + + int mlNodes = 0; + int nodesWithPytorchModel = 0; + int nodesWithAnomalyJob = 0; + int nodesWithDfaJob = 0; + + for (MlMemoryStats stats : statsList) { + assertThat(stats.getMemTotal().getBytes(), greaterThan(0L)); + assertThat(stats.getMemAdjustedTotal().getBytes(), greaterThan(0L)); + 
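// The adjusted total reflects any downward correction of the raw OS total (for example a memory override on small or containerized nodes), so it should never exceed memTotal, as the next assertion verifies. +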
assertThat(stats.getMemAdjustedTotal().getBytes(), lessThanOrEqualTo(stats.getMemTotal().getBytes())); + boolean isMlNode = stats.getNode().getRoles().contains(DiscoveryNodeRole.ML_ROLE); + boolean hasPyTorchModel = (stats.getMlNativeInference().getBytes() > 0); + boolean hasAnomalyJob = (stats.getMlAnomalyDetectors().getBytes() > 0); + boolean hasDfaJob = (stats.getMlDataFrameAnalytics().getBytes() > 0); + if (isMlNode) { + ++mlNodes; + assertThat(stats.getMlMax().getBytes(), greaterThan(0L)); + if (hasPyTorchModel) { + ++nodesWithPytorchModel; + } + if (hasAnomalyJob) { + ++nodesWithAnomalyJob; + } + if (hasDfaJob) { + ++nodesWithDfaJob; + } + } else { + assertThat(stats.getMlMax().getBytes(), equalTo(0L)); + assertThat(stats.getMlAnomalyDetectors().getBytes(), equalTo(0L)); + assertThat(stats.getMlDataFrameAnalytics().getBytes(), equalTo(0L)); + assertThat(stats.getMlNativeInference().getBytes(), equalTo(0L)); + assertThat(stats.getMlNativeCodeOverhead().getBytes(), equalTo(0L)); + } + if (hasAnomalyJob || hasDfaJob || hasPyTorchModel) { + assertThat(stats.getMlNativeCodeOverhead().getBytes(), greaterThan(0L)); + } else { + assertThat(stats.getMlNativeCodeOverhead().getBytes(), equalTo(0L)); + } + assertThat(stats.getJvmHeapMax().getBytes(), greaterThan(0L)); + assertThat(stats.getJvmInferenceMax().getBytes(), greaterThan(0L)); + // This next one has to be >= 0 rather than 0 because the cache is invalidated + // lazily after models are no longer in use, and previous tests could have + // caused a model to be cached. + assertThat(stats.getJvmInference().getBytes(), greaterThanOrEqualTo(0L)); + } + assertThat(mlNodes, is(2)); + assertThat(nodesWithPytorchModel, equalTo(mlNodes)); + assertThat(nodesWithAnomalyJob, is(1)); + // It's possible that the DFA job could have finished before the stats call was made + assumeFalse( + "Data frame analytics job finished really quickly, so cannot assert DFA memory stats", + getProgress(dfaJobId).stream().allMatch(phaseProgress -> phaseProgress.getProgressPercent() == 100) + ); + assertThat(nodesWithDfaJob, is(1)); + } + + private void openAnomalyDetectionJob() throws Exception { + Job.Builder job = BaseMlIntegTestCase.createFareQuoteJob("ad", ByteSizeValue.ofMb(20)); + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet(); + client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).actionGet(); + assertBusy(() -> { + GetJobsStatsAction.Response response = client().execute( + GetJobsStatsAction.INSTANCE, + new GetJobsStatsAction.Request(job.getId()) + ).actionGet(); + assertEquals(JobState.OPENED, response.getResponse().results().get(0).getState()); + }); + } + + private void startDataFrameAnalyticsJob(String jobId) throws Exception { + String sourceIndex = "source"; + String destIndex = "dest"; + ClassificationIT.createIndex(sourceIndex, false); + ClassificationIT.indexData(sourceIndex, 350, 0, KEYWORD_FIELD); + + DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Classification(KEYWORD_FIELD)); + putAnalytics(config); + + NodeAcknowledgedResponse response = startAnalytics(jobId); + assertThat(response.getNode(), not(emptyString())); + + waitUntilSomeProgressHasBeenMadeForPhase(jobId, "loading_data"); + } + + private void deployTrainedModel() { + String modelId = "pytorch"; + client().execute( + PutTrainedModelAction.INSTANCE, + new PutTrainedModelAction.Request( + TrainedModelConfig.builder() + .setModelType(TrainedModelType.PYTORCH) + .setInferenceConfig( + new 
PassThroughConfig(null, new BertTokenization(null, false, null, Tokenization.Truncate.NONE), null) + ) + .setModelId(modelId) + .build(), + false + ) + ).actionGet(); + client().execute( + PutTrainedModelDefinitionPartAction.INSTANCE, + new PutTrainedModelDefinitionPartAction.Request( + modelId, + new BytesArray(Base64.getDecoder().decode(BASE_64_ENCODED_MODEL)), + 0, + RAW_MODEL_SIZE, + 1 + ) + ).actionGet(); + client().execute( + PutTrainedModelVocabularyAction.INSTANCE, + new PutTrainedModelVocabularyAction.Request( + modelId, + List.of("these", "are", "my", "words", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN) + ) + ).actionGet(); + client().execute( + StartTrainedModelDeploymentAction.INSTANCE, + new StartTrainedModelDeploymentAction.Request(modelId).setWaitForState(AllocationStatus.State.STARTED) + ).actionGet(); + } + + @Override + boolean supportsInference() { + return true; + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index c57e41537f722..90fb4e81b7364 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -205,7 +205,7 @@ protected void assertProgressComplete(String id) { abstract boolean supportsInference(); - private List getProgress(String id) { + protected List getProgress(String id) { GetDataFrameAnalyticsStatsAction.Response.Stats stats = getAnalyticsStats(id); assertThat(stats.getId(), equalTo(id)); List progress = stats.getProgress(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java index 3072504be5399..37e78c850b2c6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java @@ -10,25 +10,12 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.client.ml.inference.TrainedModelDefinition; -import org.elasticsearch.client.ml.inference.TrainedModelInput; -import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.TreeNode; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -41,8 +28,6 @@ import org.junit.After; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -70,11 +55,6 @@ protected Settings restClientSettings() { return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE).build(); } - @Override - protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); - } - @Override protected boolean preserveTemplatesUponCompletion() { return true; @@ -268,56 +248,95 @@ public void testExportImportModel() throws IOException { } private void putRegressionModel(String modelId) throws IOException { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - TrainedModelDefinition.Builder definition = new TrainedModelDefinition.Builder().setPreProcessors(Collections.emptyList()) - .setTrainedModel(buildRegression()); - TrainedModelConfig.builder() - .setDefinition(definition) - .setInferenceConfig(new RegressionConfig()) - .setModelId(modelId) - .setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3"))) - .build() - .toXContent(builder, ToXContent.EMPTY_PARAMS); - Request model = new Request("PUT", "_ml/trained_models/" + modelId); - model.setJsonEntity(XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON)); - assertThat(client().performRequest(model).getStatusLine().getStatusCode(), equalTo(200)); - } - } - - private static TrainedModel buildRegression() { - List featureNames = Arrays.asList("field.foo", "field.bar", "animal_cat", "animal_dog"); - Tree tree1 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(0).setThreshold(0.5), - TreeNode.builder(1).setLeafValue(Collections.singletonList(0.3)), - TreeNode.builder(2).setThreshold(0.0).setSplitFeature(3).setLeftChild(3).setRightChild(4), - TreeNode.builder(3).setLeafValue(Collections.singletonList(0.1)), - TreeNode.builder(4).setLeafValue(Collections.singletonList(0.2)) - ) - .build(); - Tree tree2 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(2).setThreshold(1.0), - TreeNode.builder(1).setLeafValue(Collections.singletonList(1.5)), - TreeNode.builder(2).setLeafValue(Collections.singletonList(0.9)) - ) - .build(); - Tree tree3 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(1).setThreshold(0.2), - TreeNode.builder(1).setLeafValue(Collections.singletonList(1.5)), - TreeNode.builder(2).setLeafValue(Collections.singletonList(0.9)) - ) - .build(); - return Ensemble.builder() - .setTargetType(TargetType.REGRESSION) - .setFeatureNames(featureNames) - .setTrainedModels(Arrays.asList(tree1, tree2, tree3)) - .setOutputAggregator(new WeightedSum(Arrays.asList(0.5, 0.5, 
0.5))) - .build(); + String modelConfig = """ + { + "definition": { + "trained_model": { + "ensemble": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "trained_models": [{ + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "tree_structure": [{ + "threshold": 0.5, + "split_feature": 0, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [0.3] + }, { + "threshold": 0.0, + "split_feature": 3, + "node_index": 2, + "left_child": 3, + "right_child": 4 + }, { + "node_index": 3, + "leaf_value": [0.1] + }, { + "node_index": 4, + "leaf_value": [0.2] + }] + } + }, { + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "tree_structure": [{ + "threshold": 1.0, + "split_feature": 2, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [1.5] + }, { + "node_index": 2, + "leaf_value": [0.9] + }] + } + }, { + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "tree_structure": [{ + "threshold": 0.2, + "split_feature": 1, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [1.5] + }, { + "node_index": 2, + "leaf_value": [0.9] + }] + } + }], + "aggregate_output": { + "weighted_sum": { + "weights": [0.5, 0.5, 0.5] + } + }, + "target_type": "regression" + } + }, + "preprocessors": [] + }, + "input": { + "field_names": ["col1", "col2", "col3"] + }, + "inference_config": { + "regression": {} + } + } + """; + + Request model = new Request("PUT", "_ml/trained_models/" + modelId); + model.setJsonEntity(modelConfig); + assertThat(client().performRequest(model).getStatusLine().getStatusCode(), equalTo(200)); } public void testStartDeploymentWithInconsistentTotalLengths() throws IOException { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java index aa1a2f0f35fdf..2f065bf681aaf 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java @@ -40,7 +40,8 @@ public void createComponents() throws Exception { configProvider = new DataFrameAnalyticsConfigProvider( client(), xContentRegistry(), - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)) + new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)), + getInstanceFromNode(ClusterService.class) ); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java index 0248ec0a8df8a..34e04b586dbf2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfigUpdate; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import 
org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; @@ -46,14 +48,20 @@ public class DataFrameAnalyticsConfigProviderIT extends MlSingleNodeTestCase { private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(5); private DataFrameAnalyticsConfigProvider configProvider; + private String dummyAuthenticationHeader; @Before public void createComponents() throws Exception { configProvider = new DataFrameAnalyticsConfigProvider( client(), xContentRegistry(), - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)) + new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)), + getInstanceFromNode(ClusterService.class) ); + dummyAuthenticationHeader = Authentication.newRealmAuthentication( + new User("dummy"), + new Authentication.RealmRef("name", "type", "node") + ).encode(); waitForMlTemplates(); } @@ -97,7 +105,7 @@ public void testPutAndGet() throws InterruptedException { public void testPutAndGet_WithSecurityHeaders() throws InterruptedException { String configId = "config-id"; DataFrameAnalyticsConfig config = DataFrameAnalyticsConfigTests.createRandom(configId); - Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", "dummy"); + Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", dummyAuthenticationHeader); { // Put the config and verify the response AtomicReference configHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -275,7 +283,7 @@ public void testUpdate() throws Exception { ); } { // Update that applies security headers - Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", "dummy"); + Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", dummyAuthenticationHeader); AtomicReference updatedConfigHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java index 6e5c77e073657..1202d61b674ab 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; @@ -20,6 +21,10 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import 
org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.hamcrest.core.IsInstanceOf; @@ -45,6 +50,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -53,11 +59,16 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase { private DatafeedConfigProvider datafeedConfigProvider; + private String dummyAuthenticationHeader; @Before public void createComponents() throws Exception { - datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); + datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry(), getInstanceFromNode(ClusterService.class)); waitForMlTemplates(); + dummyAuthenticationHeader = Authentication.newRealmAuthentication( + new User("dummy"), + new Authentication.RealmRef("name", "type", "node") + ).encode(); } public void testCrud() throws InterruptedException { @@ -94,10 +105,7 @@ public void testCrud() throws InterruptedException { DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedId); List updateIndices = Collections.singletonList("a-different-index"); update.setIndices(updateIndices); - Map updateHeaders = new HashMap<>(); - // Only security headers are updated, grab the first one - String securityHeader = ClientHelper.SECURITY_HEADER_FILTERS.iterator().next(); - updateHeaders.put(securityHeader, "CHANGED"); + Map updateHeaders = createSecurityHeader(); AtomicReference configHolder = new AtomicReference<>(); blockingCall( @@ -113,7 +121,7 @@ public void testCrud() throws InterruptedException { ); assertNull(exceptionHolder.get()); assertThat(configHolder.get().getIndices(), equalTo(updateIndices)); - assertThat(configHolder.get().getHeaders().get(securityHeader), equalTo("CHANGED")); + updateHeaders.forEach((key, value) -> assertThat(configHolder.get().getHeaders(), hasEntry(key, value))); // Read the updated config configBuilderHolder.set(null); @@ -124,7 +132,7 @@ public void testCrud() throws InterruptedException { ); assertNull(exceptionHolder.get()); assertThat(configBuilderHolder.get().build().getIndices(), equalTo(updateIndices)); - assertThat(configBuilderHolder.get().build().getHeaders().get(securityHeader), equalTo("CHANGED")); + updateHeaders.forEach((key, value) -> assertThat(configHolder.get().getHeaders(), hasEntry(key, value))); // Delete AtomicReference deleteResponseHolder = new AtomicReference<>(); @@ -498,7 +506,11 @@ private Map createSecurityHeader() { Map headers = new HashMap<>(); // Only security headers are updated, grab the first one String securityHeader = ClientHelper.SECURITY_HEADER_FILTERS.iterator().next(); - headers.put(securityHeader, "SECURITY_"); + if (Set.of(AuthenticationField.AUTHENTICATION_KEY, SecondaryAuthentication.THREAD_CTX_KEY).contains(securityHeader)) { + headers.put(securityHeader, dummyAuthenticationHeader); + } else { + headers.put(securityHeader, "SECURITY_"); + } return headers; } diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java index 317740efe4850..a0f72d738642e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.XContentType; @@ -41,7 +42,7 @@ public class MlAutoUpdateServiceIT extends MlSingleNodeTestCase { @Before public void createComponents() throws Exception { - datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); + datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry(), getInstanceFromNode(ClusterService.class)); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java index 9651115c658ee..ac2416c899028 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.After; import java.time.ZonedDateTime; @@ -57,7 +58,7 @@ public class TestFeatureLicenseTrackingIT extends MlSingleNodeTestCase { private final Set createdPipelines = new HashSet<>(); @After - public void cleanup() { + public void cleanup() throws Exception { for (String pipeline : createdPipelines) { try { client().execute(DeletePipelineAction.INSTANCE, new DeletePipelineRequest(pipeline)).actionGet(); @@ -65,6 +66,9 @@ public void cleanup() { logger.warn(() -> new ParameterizedMessage("error cleaning up pipeline [{}]", pipeline), ex); } } + // Some of the tests have async side effects. We need to wait for these to complete before continuing + // the cleanup, otherwise unexpected indices may get created during the cleanup process. 
+ BaseMlIntegTestCase.waitForPendingTasks(client()); } public void testFeatureTrackingAnomalyJob() throws Exception { @@ -125,7 +129,7 @@ public void testFeatureTrackingInferenceModelPipeline() throws Exception { .setInferenceConfig(new ClassificationConfig(3)) .setParsedDefinition( new TrainedModelDefinition.Builder().setPreProcessors( - Arrays.asList(new OneHotEncoding("other.categorical", oneHotEncoding, false)) + List.of(new OneHotEncoding("other.categorical", oneHotEncoding, false)) ).setTrainedModel(buildClassification(true)) ) .build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index b775591d152ab..527c395e6e3df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -136,6 +136,7 @@ import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.action.MlInfoAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.PersistJobAction; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -161,6 +162,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; @@ -231,6 +233,7 @@ import org.elasticsearch.xpack.ml.action.TransportIsolateDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportKillProcessAction; import org.elasticsearch.xpack.ml.action.TransportMlInfoAction; +import org.elasticsearch.xpack.ml.action.TransportMlMemoryAction; import org.elasticsearch.xpack.ml.action.TransportOpenJobAction; import org.elasticsearch.xpack.ml.action.TransportPersistJobAction; import org.elasticsearch.xpack.ml.action.TransportPostCalendarEventsAction; @@ -256,6 +259,7 @@ import org.elasticsearch.xpack.ml.action.TransportStopDataFrameAnalyticsAction; import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportStopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.ml.action.TransportTrainedModelCacheInfoAction; import org.elasticsearch.xpack.ml.action.TransportUpdateCalendarJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDataFrameAnalyticsAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDatafeedAction; @@ -342,6 +346,7 @@ import org.elasticsearch.xpack.ml.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; import org.elasticsearch.xpack.ml.rest.RestMlInfoAction; +import org.elasticsearch.xpack.ml.rest.RestMlMemoryAction; import org.elasticsearch.xpack.ml.rest.RestSetUpgradeModeAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarEventAction; @@ -786,7 +791,7 @@ public Collection createComponents( 
anomalyDetectionAuditor ); JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry); - DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry); + DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry, clusterService); this.datafeedConfigProvider.set(datafeedConfigProvider); UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool); JobManager jobManager = new JobManager( @@ -969,7 +974,8 @@ public Collection createComponents( DataFrameAnalyticsConfigProvider dataFrameAnalyticsConfigProvider = new DataFrameAnalyticsConfigProvider( client, xContentRegistry, - dataFrameAnalyticsAuditor + dataFrameAnalyticsAuditor, + clusterService ); assert client instanceof NodeClient; DataFrameAnalyticsManager dataFrameAnalyticsManager = new DataFrameAnalyticsManager( @@ -1136,6 +1142,7 @@ public List getRestHandlers( new RestGetJobsAction(), new RestGetJobStatsAction(), new RestMlInfoAction(), + new RestMlMemoryAction(), new RestPutJobAction(), new RestPostJobUpdateAction(), new RestDeleteJobAction(), @@ -1222,6 +1229,7 @@ public List getRestHandlers( new ActionHandler<>(GetJobsAction.INSTANCE, TransportGetJobsAction.class), new ActionHandler<>(GetJobsStatsAction.INSTANCE, TransportGetJobsStatsAction.class), new ActionHandler<>(MlInfoAction.INSTANCE, TransportMlInfoAction.class), + new ActionHandler<>(MlMemoryAction.INSTANCE, TransportMlMemoryAction.class), new ActionHandler<>(PutJobAction.INSTANCE, TransportPutJobAction.class), new ActionHandler<>(UpdateJobAction.INSTANCE, TransportUpdateJobAction.class), new ActionHandler<>(DeleteJobAction.INSTANCE, TransportDeleteJobAction.class), @@ -1280,6 +1288,7 @@ public List getRestHandlers( new ActionHandler<>(EvaluateDataFrameAction.INSTANCE, TransportEvaluateDataFrameAction.class), new ActionHandler<>(ExplainDataFrameAnalyticsAction.INSTANCE, TransportExplainDataFrameAnalyticsAction.class), new ActionHandler<>(InternalInferModelAction.INSTANCE, TransportInternalInferModelAction.class), + new ActionHandler<>(TrainedModelCacheInfoAction.INSTANCE, TransportTrainedModelCacheInfoAction.class), new ActionHandler<>(GetTrainedModelsAction.INSTANCE, TransportGetTrainedModelsAction.class), new ActionHandler<>(DeleteTrainedModelAction.INSTANCE, TransportDeleteTrainedModelAction.class), new ActionHandler<>(GetTrainedModelsStatsAction.INSTANCE, TransportGetTrainedModelsStatsAction.class), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java index 777b0c2c9ad3c..e4dfd64fd8dea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -48,7 +49,6 @@ import java.util.Objects; import java.util.Optional; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static 
org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; /** @@ -134,7 +134,7 @@ private void explain( // Set the auth headers (preferring the secondary headers) to the caller's. // Regardless if the config was previously stored or not. DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder(request.getConfig()).setHeaders( - filterSecurityHeaders(threadPool.getThreadContext().getHeaders()) + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) ).build(); extractedFieldsDetectorFactory.createFromSource( config, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java new file mode 100644 index 0000000000000..08c6361c09408 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.monitor.os.OsStats; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; +import org.elasticsearch.xpack.ml.job.NodeLoad; +import org.elasticsearch.xpack.ml.job.NodeLoadDetector; +import org.elasticsearch.xpack.ml.process.MlMemoryTracker; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_OPEN_JOBS_PER_NODE; +import static 
org.elasticsearch.xpack.ml.MachineLearning.USE_AUTO_MACHINE_MEMORY_PERCENT; + +public class TransportMlMemoryAction extends TransportMasterNodeAction<MlMemoryAction.Request, MlMemoryAction.Response> { + + private final Client client; + private final MlMemoryTracker memoryTracker; + + @Inject + public TransportMlMemoryAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client, + MlMemoryTracker memoryTracker + ) { + super( + MlMemoryAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + MlMemoryAction.Request::new, + indexNameExpressionResolver, + MlMemoryAction.Response::new, + ThreadPool.Names.SAME + ); + this.client = new OriginSettingClient(client, ML_ORIGIN); + this.memoryTracker = memoryTracker; + } + + @Override + protected void masterOperation( + Task task, + MlMemoryAction.Request request, + ClusterState state, + ActionListener<MlMemoryAction.Response> listener + ) throws Exception { + + ClusterSettings clusterSettings = clusterService.getClusterSettings(); + + // Resolve the node specification to some concrete nodes + String[] nodeIds = state.nodes().resolveNodes(request.getNodeId()); + + ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); + + ActionListener<NodesStatsResponse> nodeStatsListener = ActionListener.wrap(nodesStatsResponse -> { + TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( + nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) + ).timeout(request.timeout()); + + parentTaskClient.execute( + TrainedModelCacheInfoAction.INSTANCE, + trainedModelCacheInfoRequest, + ActionListener.wrap( + trainedModelCacheInfoResponse -> handleResponses( + state, + clusterSettings, + nodesStatsResponse, + trainedModelCacheInfoResponse, + listener + ), + listener::onFailure + ) + ); + }, listener::onFailure); + + // Next get node stats related to the OS and JVM + ActionListener<Void> memoryTrackerRefreshListener = ActionListener.wrap( + r -> parentTaskClient.admin() + .cluster() + .prepareNodesStats(nodeIds) + .clear() + .setOs(true) + .setJvm(true) + .setTimeout(request.timeout()) + .execute(nodeStatsListener), + listener::onFailure + ); + + // If the memory tracker has never been refreshed, do that first + if (memoryTracker.isEverRefreshed()) { + memoryTrackerRefreshListener.onResponse(null); + } else { + memoryTracker.refresh(state.getMetadata().custom(PersistentTasksCustomMetadata.TYPE), memoryTrackerRefreshListener); + } + } + + void handleResponses( + ClusterState state, + ClusterSettings clusterSettings, + NodesStatsResponse nodesStatsResponse, + TrainedModelCacheInfoAction.Response trainedModelCacheInfoResponse, + ActionListener<MlMemoryAction.Response> listener + ) { + List<MlMemoryStats> nodeResponses = new ArrayList<>(nodesStatsResponse.getNodes().size()); + + int maxOpenJobsPerNode = clusterSettings.get(MAX_OPEN_JOBS_PER_NODE); + int maxMachineMemoryPercent = clusterSettings.get(MAX_MACHINE_MEMORY_PERCENT); + boolean useAutoMachineMemoryPercent = clusterSettings.get(USE_AUTO_MACHINE_MEMORY_PERCENT); + NodeLoadDetector nodeLoadDetector = new NodeLoadDetector(memoryTracker); + Map<String, CacheInfo> cacheInfoByNode = trainedModelCacheInfoResponse.getNodesMap(); + List<FailedNodeException> failures = new ArrayList<>(nodesStatsResponse.failures()); + + for (NodeStats nodeStats : nodesStatsResponse.getNodes()) { + DiscoveryNode node = nodeStats.getNode(); + String nodeId = node.getId(); + // We only provide a response if both
requests we issued to all nodes returned. + // The loop is iterating successes of the node stats call with failures already + // accumulated. This check adds failures of the trained model cache call that + // happened on nodes where the node stats call succeeded. + Optional trainedModelCacheInfoFailure = trainedModelCacheInfoResponse.failures() + .stream() + .filter(e -> nodeId.equals(e.nodeId())) + .findFirst(); + if (trainedModelCacheInfoFailure.isPresent()) { + failures.add(trainedModelCacheInfoFailure.get()); + continue; + } + OsStats.Mem mem = nodeStats.getOs().getMem(); + ByteSizeValue mlMax; + ByteSizeValue mlNativeCodeOverhead; + ByteSizeValue mlAnomalyDetectors; + ByteSizeValue mlDataFrameAnalytics; + ByteSizeValue mlNativeInference; + if (node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)) { + NodeLoad nodeLoad = nodeLoadDetector.detectNodeLoad( + state, + node, + maxOpenJobsPerNode, + maxMachineMemoryPercent, + useAutoMachineMemoryPercent + ); + mlMax = ByteSizeValue.ofBytes(nodeLoad.getMaxMlMemory()); + mlNativeCodeOverhead = ByteSizeValue.ofBytes(nodeLoad.getAssignedNativeCodeOverheadMemory()); + mlAnomalyDetectors = ByteSizeValue.ofBytes(nodeLoad.getAssignedAnomalyDetectorMemory()); + mlDataFrameAnalytics = ByteSizeValue.ofBytes(nodeLoad.getAssignedDataFrameAnalyticsMemory()); + mlNativeInference = ByteSizeValue.ofBytes(nodeLoad.getAssignedNativeInferenceMemory()); + } else { + mlMax = ByteSizeValue.ZERO; + mlNativeCodeOverhead = ByteSizeValue.ZERO; + mlAnomalyDetectors = ByteSizeValue.ZERO; + mlDataFrameAnalytics = ByteSizeValue.ZERO; + mlNativeInference = ByteSizeValue.ZERO; + } + ByteSizeValue jvmHeapMax = nodeStats.getJvm().getMem().getHeapMax(); + ByteSizeValue jvmInferenceMax; + ByteSizeValue jvmInference; + CacheInfo cacheInfoForNode = cacheInfoByNode.get(nodeId); + if (cacheInfoForNode != null) { + jvmInferenceMax = cacheInfoForNode.getJvmInferenceMax(); + jvmInference = cacheInfoForNode.getJvmInference(); + } else { + jvmInferenceMax = ByteSizeValue.ZERO; + jvmInference = ByteSizeValue.ZERO; + } + nodeResponses.add( + new MlMemoryStats( + node, + mem.getTotal(), + mem.getAdjustedTotal(), + mlMax, + mlNativeCodeOverhead, + mlAnomalyDetectors, + mlDataFrameAnalytics, + mlNativeInference, + jvmHeapMax, + jvmInferenceMax, + jvmInference + ) + ); + } + + listener.onResponse(new MlMemoryAction.Response(state.getClusterName(), nodeResponses, failures)); + } + + @Override + protected ClusterBlockException checkBlock(MlMemoryAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java index 6634d6c1a4425..80cd62b4831e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; @@ -18,6 
+19,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -37,7 +39,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; /** @@ -50,6 +51,7 @@ public class TransportPreviewDataFrameAnalyticsAction extends HandledTransportAc private final SecurityContext securityContext; private final ThreadPool threadPool; private final Settings settings; + private final ClusterService clusterService; @Inject public TransportPreviewDataFrameAnalyticsAction( @@ -58,7 +60,8 @@ public TransportPreviewDataFrameAnalyticsAction( NodeClient client, XPackLicenseState licenseState, Settings settings, - ThreadPool threadPool + ThreadPool threadPool, + ClusterService clusterService ) { super(PreviewDataFrameAnalyticsAction.NAME, transportService, actionFilters, Request::new); this.client = Objects.requireNonNull(client); @@ -68,6 +71,7 @@ public TransportPreviewDataFrameAnalyticsAction( this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? new SecurityContext(settings, threadPool.getThreadContext()) : null; + this.clusterService = clusterService; } private static Map mergeRow(DataFrameDataExtractor.Row row, List fieldNames) { @@ -87,7 +91,7 @@ protected void doExecute(Task task, Request request, ActionListener li // Set the auth headers (preferring the secondary headers) to the caller's. // Regardless if the config was previously stored or not. 
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder(request.getConfig()).setHeaders( - filterSecurityHeaders(threadPool.getThreadContext().getHeaders()) + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) ).build(); preview(task, config, listener); }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 197e3e9aa3027..987ef5523218a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.action.PreviewDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; @@ -48,7 +49,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeWithHeadersAsync; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; public class TransportPreviewDatafeedAction extends HandledTransportAction { @@ -115,7 +115,9 @@ private void previewDatafeed( ) { DatafeedConfig.Builder previewDatafeedBuilder = buildPreviewDatafeed(datafeedConfig); useSecondaryAuthIfAvailable(securityContext, () -> { - previewDatafeedBuilder.setHeaders(filterSecurityHeaders(threadPool.getThreadContext().getHeaders())); + previewDatafeedBuilder.setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) + ); // NB: this is using the client from the transport layer, NOT the internal client. // This is important because it means the datafeed search will fail if the user // requesting the preview doesn't have permission to search the relevant indices. diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java new file mode 100644 index 0000000000000..832fb72010bcf --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; +import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class TransportTrainedModelCacheInfoAction extends TransportNodesAction< + TrainedModelCacheInfoAction.Request, + TrainedModelCacheInfoAction.Response, + TransportTrainedModelCacheInfoAction.NodeModelCacheInfoRequest, + CacheInfo> { + + private final ModelLoadingService modelLoadingService; + + @Inject + public TransportTrainedModelCacheInfoAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + ModelLoadingService modelLoadingService + ) { + super( + TrainedModelCacheInfoAction.NAME, + threadPool, + clusterService, + transportService, + actionFilters, + TrainedModelCacheInfoAction.Request::new, + NodeModelCacheInfoRequest::new, + ThreadPool.Names.MANAGEMENT, + CacheInfo.class + ); + this.modelLoadingService = modelLoadingService; + } + + @Override + protected TrainedModelCacheInfoAction.Response newResponse( + TrainedModelCacheInfoAction.Request request, + List responses, + List failures + ) { + return new TrainedModelCacheInfoAction.Response(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeModelCacheInfoRequest newNodeRequest(TrainedModelCacheInfoAction.Request request) { + return new NodeModelCacheInfoRequest(request); + } + + @Override + protected CacheInfo newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new CacheInfo(in); + } + + @Override + protected CacheInfo nodeOperation(NodeModelCacheInfoRequest nodeModelCacheInfoRequest, Task task) { + assert task instanceof CancellableTask; + return new CacheInfo( + transportService.getLocalNode(), + modelLoadingService.getMaxCacheSize(), + modelLoadingService.getCurrentCacheSize() + ); + } + + public static class NodeModelCacheInfoRequest extends TransportRequest { + + TrainedModelCacheInfoAction.Request request; + + public NodeModelCacheInfoRequest(StreamInput in) throws IOException { + super(in); + request = new TrainedModelCacheInfoAction.Request(in); + } + + NodeModelCacheInfoRequest(TrainedModelCacheInfoAction.Request request) { + this.request = request; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + } +} diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java index 6608bc32a4536..10eb7311ba321 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.aggs.correlation; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.plugins.SearchPlugin; @@ -142,4 +143,9 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(super.hashCode(), correlationFunction); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_14_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java index 5ff742f366aa9..44a308519dd3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.aggs.inference; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; @@ -376,4 +377,9 @@ public boolean equals(Object obj) { && Objects.equals(modelId, other.modelId) && Objects.equals(inferenceConfig, other.inferenceConfig); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java index 4c6623eb55c6b..eb6cc48a31635 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.aggs.kstest; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -190,4 +191,9 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(super.hashCode(), Arrays.hashCode(fractions), alternative, samplingMethod); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_14_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 34db2e7d039d8..c225e08639a05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -214,7 +214,7 @@ static Optional>> determineUnassignab if (nodeLoad.getFreeMemory() >= requiredMemory) { assignmentIter.remove(); // Remove and add to the priority queue to make sure the biggest node with availability is first - mostFreeMemoryFirst.add(mostFreeMemoryFirst.poll().incNumAssignedJobs().incAssignedJobMemory(requiredMemory)); + mostFreeMemoryFirst.add(mostFreeMemoryFirst.poll().incNumAssignedJobs().incAssignedAnomalyDetectorMemory(requiredMemory)); } } List adjustedLoads = mostFreeMemoryFirst.stream().map(NodeLoad.Builder::build).collect(Collectors.toList()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 0f7bc46d9a7d0..1925e1975e827 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.regex.Regex; @@ -46,6 +47,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -71,7 +73,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; /** * This class implements CRUD operation for the @@ -87,12 +88,14 @@ public class DatafeedConfigProvider { private static final Logger logger = LogManager.getLogger(DatafeedConfigProvider.class); private final Client client; private final NamedXContentRegistry xContentRegistry; + private final ClusterService clusterService; public static final Map TO_XCONTENT_PARAMS = Map.of(ToXContentParams.FOR_INTERNAL_STORAGE, "true"); - public DatafeedConfigProvider(Client client, NamedXContentRegistry xContentRegistry) { + public DatafeedConfigProvider(Client client, NamedXContentRegistry xContentRegistry, ClusterService clusterService) { this.client = client; this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; } /** @@ -107,7 +110,9 @@ public void putDatafeedConfig(DatafeedConfig config, Map headers if (headers.isEmpty() == false) { // Filter any values in headers that aren't security fields - config = new DatafeedConfig.Builder(config).setHeaders(filterSecurityHeaders(headers)).build(); + config = new DatafeedConfig.Builder(config).setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state()) + ).build(); } final String datafeedId = config.getId(); @@ -299,7 +304,7 @@ public void onResponse(GetResponse getResponse) { DatafeedConfig updatedConfig; try { - updatedConfig = 
update.apply(configBuilder.build(), headers); + updatedConfig = update.apply(configBuilder.build(), headers, clusterService.state()); } catch (Exception e) { delegate.onFailure(e); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 78f9885351f2c..1ab486cb061d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.Nullable; @@ -41,6 +42,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -66,7 +68,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; public class DataFrameAnalyticsConfigProvider { @@ -79,11 +80,18 @@ public class DataFrameAnalyticsConfigProvider { private final Client client; private final NamedXContentRegistry xContentRegistry; private final DataFrameAnalyticsAuditor auditor; + private final ClusterService clusterService; - public DataFrameAnalyticsConfigProvider(Client client, NamedXContentRegistry xContentRegistry, DataFrameAnalyticsAuditor auditor) { + public DataFrameAnalyticsConfigProvider( + Client client, + NamedXContentRegistry xContentRegistry, + DataFrameAnalyticsAuditor auditor, + ClusterService clusterService + ) { this.client = Objects.requireNonNull(client); this.xContentRegistry = xContentRegistry; this.auditor = Objects.requireNonNull(auditor); + this.clusterService = clusterService; } /** @@ -113,7 +121,11 @@ public void put( } private DataFrameAnalyticsConfig prepareConfigForIndex(DataFrameAnalyticsConfig config, Map headers) { - return headers.isEmpty() ? config : new DataFrameAnalyticsConfig.Builder(config).setHeaders(filterSecurityHeaders(headers)).build(); + return headers.isEmpty() + ? 
config + : new DataFrameAnalyticsConfig.Builder(config).setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state()) + ).build(); } private void exists(String jobId, ActionListener listener) { @@ -183,7 +195,7 @@ public void update( // Merge the original config with the given update object DataFrameAnalyticsConfig.Builder updatedConfigBuilder = update.mergeWithConfig(originalConfig); if (headers.isEmpty() == false) { - updatedConfigBuilder.setHeaders(filterSecurityHeaders(headers)); + updatedConfigBuilder.setHeaders(ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state())); } DataFrameAnalyticsConfig updatedConfig = updatedConfigBuilder.build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index b49b2a950dcce..a8f744d7181e7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -29,6 +29,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; @@ -201,7 +202,11 @@ Vocabulary parseVocabularyDocLeniently(SearchHit hit) throws IOException { try ( InputStream stream = hit.getSourceRef().streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream) + .createParser( + XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) + .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + stream + ) ) { return Vocabulary.createParser(true).apply(parser, null); } catch (IOException e) { @@ -374,8 +379,8 @@ protected void doRun() throws Exception { NlpConfig nlpConfig = (NlpConfig) config; NlpTask.Request request = processor.getRequestBuilder(nlpConfig) .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate()); - logger.debug(() -> "Inference Request " + request.processInput.utf8ToString()); - if (request.tokenization.anyTruncated()) { + logger.debug(() -> "Inference Request " + request.processInput().utf8ToString()); + if (request.tokenization().anyTruncated()) { logger.debug("[{}] [{}] input truncated", modelId, requestId); } processContext.getResultProcessor() @@ -385,14 +390,14 @@ protected void doRun() throws Exception { inferenceResult -> processResult( inferenceResult, processContext, - request.tokenization, + request.tokenization(), processor.getResultProcessor((NlpConfig) config), this ), this::onFailure ) ); - processContext.process.get().writeInferenceRequest(request.processInput); + processContext.process.get().writeInferenceRequest(request.processInput()); } catch (IOException e) { logger.error(new ParameterizedMessage("[{}] error writing to inference process", processContext.task.getModelId()), e); onFailure(ExceptionsHelper.serverError("Error writing to inference process", e)); @@ -448,8 +453,8 @@ class ProcessContext { private volatile Instant startTime; private volatile Integer inferenceThreads; private volatile 
Integer modelThreads; - private AtomicInteger rejectedExecutionCount = new AtomicInteger(); - private AtomicInteger timeoutCount = new AtomicInteger(); + private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); + private final AtomicInteger timeoutCount = new AtomicInteger(); ProcessContext(TrainedModelDeploymentTask task, ExecutorService executorService) { this.task = Objects.requireNonNull(task); @@ -478,15 +483,19 @@ synchronized void startProcess() { synchronized void stopProcess() { resultProcessor.stop(); executorService.shutdown(); - if (process.get() == null) { - return; - } try { + if (process.get() == null) { + return; + } stateStreamer.cancel(); process.get().kill(true); processContextByAllocation.remove(task.getId()); } catch (IOException e) { logger.error(new ParameterizedMessage("[{}] Failed to kill process", task.getModelId()), e); + } finally { + if (nlpTaskProcessor.get() != null) { + nlpTaskProcessor.get().close(); + } } } @@ -496,6 +505,9 @@ private Consumer onProcessCrash() { resultProcessor.stop(); executorService.shutdownWithError(new IllegalStateException(reason)); processContextByAllocation.remove(task.getId()); + if (nlpTaskProcessor.get() != null) { + nlpTaskProcessor.get().close(); + } task.setFailed("inference process crashed due to reason [" + reason + "]"); }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index 7dcdac32e4e68..920e01e6ba972 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -181,6 +181,20 @@ boolean isModelCached(String modelId) { return localModelCache.get(modelAliasToId.getOrDefault(modelId, modelId)) != null; } + public ByteSizeValue getMaxCacheSize() { + return maxCacheSize; + } + + /** + * This method is intended for use in telemetry, not making decisions about what will fit in the cache. + * The value returned could immediately be out-of-date if cache changes are in progress. It is good + * enough for external reporting of vaguely up-to-date status, but not for anything related to immediate + * additions to the cache. + */ + public ByteSizeValue getCurrentCacheSize() { + return ByteSizeValue.ofBytes(localModelCache.weight()); + } + /** * Load the model for use by an ingest pipeline. The model will not be cached if there is no * ingest pipeline referencing it i.e. it is used in simulate mode diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java deleted file mode 100644 index 88a6b6b8739f4..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.nlp; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; - -import java.io.IOException; -import java.util.List; -import java.util.stream.Collectors; - -public class BertRequestBuilder implements NlpTask.RequestBuilder { - - static final String REQUEST_ID = "request_id"; - static final String TOKENS = "tokens"; - static final String ARG1 = "arg_1"; - static final String ARG2 = "arg_2"; - static final String ARG3 = "arg_3"; - - private final NlpTokenizer tokenizer; - - public BertRequestBuilder(NlpTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - @Override - public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - - TokenizationResult tokenization = tokenizer.buildTokenizationResult( - inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList()) - ); - return buildRequest(tokenization, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId)); - } - - static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(REQUEST_ID, requestId); - - NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); - NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); - int batchSize = tokenization.getTokenizations().size(); - NlpTask.RequestBuilder.writeNonPaddedArguments(ARG2, batchSize, tokenization.getLongestSequenceLength(), i -> 0, builder); - NlpTask.RequestBuilder.writeNonPaddedArguments(ARG3, batchSize, tokenization.getLongestSequenceLength(), i -> i, builder); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } - -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java index 5343cd94e141d..2ce2fc1ea471d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java @@ -23,15 +23,14 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.OptionalInt; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class FillMaskProcessor implements 
NlpTask.Processor { - - private final NlpTokenizer tokenizer; +public class FillMaskProcessor extends NlpTask.Processor { FillMaskProcessor(NlpTokenizer tokenizer, FillMaskConfig config) { - this.tokenizer = tokenizer; + super(tokenizer); } @Override @@ -92,7 +91,7 @@ static InferenceResults processResult( int numResults, String resultsField ) { - if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) { + if (tokenization.isEmpty()) { throw new ElasticsearchStatusException("tokenization is empty", RestStatus.INTERNAL_SERVER_ERROR); } @@ -103,25 +102,20 @@ static InferenceResults processResult( ); } - int maskTokenIndex = -1; int maskTokenId = tokenizer.getMaskTokenId().getAsInt(); - for (int i = 0; i < tokenization.getTokenizations().get(0).getTokenIds().length; i++) { - if (tokenization.getTokenizations().get(0).getTokenIds()[i] == maskTokenId) { - maskTokenIndex = i; - break; - } - } - if (maskTokenIndex == -1) { + OptionalInt maskTokenIndex = tokenization.getTokenization(0).getTokenIndex(maskTokenId); + if (maskTokenIndex.isEmpty()) { throw new ElasticsearchStatusException( - "mask token id [{}] not found in the tokenization {}", + "mask token id [{}] not found in the tokenization", RestStatus.INTERNAL_SERVER_ERROR, - maskTokenId, - List.of(tokenization.getTokenizations().get(0).getTokenIds()) + maskTokenId ); } // TODO - process all results in the batch - double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0][maskTokenIndex]); + double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax( + pyTorchResult.getInferenceResult()[0][maskTokenIndex.getAsInt()] + ); NlpHelpers.ScoreAndIndex[] scoreAndIndices = NlpHelpers.topK( // We need at least one to record the result @@ -137,10 +131,7 @@ static InferenceResults processResult( } return new FillMaskResults( tokenization.getFromVocab(scoreAndIndices[0].index), - tokenization.getTokenizations() - .get(0) - .getInput() - .replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), + tokenization.getTokenization(0).input().replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), results, Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), scoreAndIndices[0].score, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java deleted file mode 100644 index f8ea5a513aa76..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.nlp; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; - -import java.io.IOException; -import java.util.List; -import java.util.stream.Collectors; - -public class MPNetRequestBuilder implements NlpTask.RequestBuilder { - - static final String REQUEST_ID = "request_id"; - static final String TOKENS = "tokens"; - static final String ARG1 = "arg_1"; - - private final NlpTokenizer tokenizer; - - public MPNetRequestBuilder(NlpTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - @Override - public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - - TokenizationResult tokenization = tokenizer.buildTokenizationResult( - inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList()) - ); - return buildRequest(tokenization, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId)); - } - - static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(REQUEST_ID, requestId); - - NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); - NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } - -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index ac0395ce31b48..e8c7253d3c5d2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -32,7 +32,7 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class NerProcessor implements NlpTask.Processor { +public class NerProcessor extends NlpTask.Processor { public enum Entity implements Writeable { NONE, @@ -85,6 +85,7 @@ boolean isBeginning() { private final boolean ignoreCase; NerProcessor(NlpTokenizer tokenizer, NerConfig config) { + super(tokenizer); validate(config.getClassificationLabels()); this.iobMap = buildIobMap(config.getClassificationLabels()); this.requestBuilder = tokenizer.requestBuilder(); @@ -181,11 +182,7 @@ static String buildAnnotatedText(String seq, List entiti return 
annotatedResultBuilder.toString(); } - static class NerResultProcessor implements NlpTask.ResultProcessor { - private final IobTag[] iobMap; - private final String resultsField; - private final boolean ignoreCase; - + record NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) implements NlpTask.ResultProcessor { NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) { this.iobMap = iobMap; this.resultsField = Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD); @@ -194,7 +191,7 @@ static class NerResultProcessor implements NlpTask.ResultProcessor { @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) { - if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) { + if (tokenization.isEmpty()) { throw new ElasticsearchStatusException("no valid tokenization to build result", RestStatus.INTERNAL_SERVER_ERROR); } // TODO - process all results in the batch @@ -206,18 +203,16 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn // of maybe (1 + 0) / 2 = 0.5 while before softmax it'd be exp(10 - 5) / normalization // which could easily be close to 1. double[][] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]); - List taggedTokens = tagTokens(tokenization.getTokenizations().get(0), normalizedScores, iobMap); + List taggedTokens = tagTokens(tokenization.getTokenization(0), normalizedScores, iobMap); List entities = groupTaggedTokens( taggedTokens, - ignoreCase - ? tokenization.getTokenizations().get(0).getInput().toLowerCase(Locale.ROOT) - : tokenization.getTokenizations().get(0).getInput() + ignoreCase ? tokenization.getTokenization(0).input().toLowerCase(Locale.ROOT) : tokenization.getTokenization(0).input() ); return new NerResults( resultsField, - buildAnnotatedText(tokenization.getTokenizations().get(0).getInput(), entities), + buildAnnotatedText(tokenization.getTokenization(0).input(), entities), entities, tokenization.anyTruncated() ); @@ -229,19 +224,20 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn * in the original input replacing them with a single token that * gets labelled based on the average score of all its sub-tokens. 
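* For example (illustrative values, not from this change): if "Elasticsearch" is
* word-piece tokenized into sub-tokens ["elastic", "##search"] with B_ORG scores of
* 0.9 and 0.7, the merged token gets the tag with the highest averaged score, here
* (0.9 + 0.7) / 2 = 0.8 for B_ORG.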
*/ - static List tagTokens(TokenizationResult.Tokenization tokenization, double[][] scores, IobTag[] iobMap) { + static List tagTokens(TokenizationResult.Tokens tokenization, double[][] scores, IobTag[] iobMap) { List taggedTokens = new ArrayList<>(); int startTokenIndex = 0; - while (startTokenIndex < tokenization.getTokenIds().length) { - int inputMapping = tokenization.getTokenMap()[startTokenIndex]; + int numSpecialTokens = 0; + while (startTokenIndex < tokenization.tokenIds().length) { + int inputMapping = tokenization.tokenMap()[startTokenIndex]; if (inputMapping < 0) { // This token does not map to a token in the input (special tokens) startTokenIndex++; + numSpecialTokens++; continue; } int endTokenIndex = startTokenIndex; - while (endTokenIndex < tokenization.getTokenMap().length - 1 - && tokenization.getTokenMap()[endTokenIndex + 1] == inputMapping) { + while (endTokenIndex < tokenization.tokenMap().length - 1 && tokenization.tokenMap()[endTokenIndex + 1] == inputMapping) { endTokenIndex++; } double[] avgScores = Arrays.copyOf(scores[startTokenIndex], iobMap.length); @@ -258,7 +254,9 @@ static List tagTokens(TokenizationResult.Tokenization tokenization, } int maxScoreIndex = NlpHelpers.argmax(avgScores); double score = avgScores[maxScoreIndex]; - taggedTokens.add(new TaggedToken(tokenization.getTokens().get(inputMapping), iobMap[maxScoreIndex], score)); + taggedTokens.add( + new TaggedToken(tokenization.tokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score) + ); startTokenIndex = endTokenIndex + 1; } return taggedTokens; @@ -296,8 +294,8 @@ static List groupTaggedTokens(List tokens, endTokenIndex++; } - int startPos = token.token.getStartPos(); - int endPos = tokens.get(endTokenIndex - 1).token.getEndPos(); + int startPos = token.token.startOffset(); + int endPos = tokens.get(endTokenIndex - 1).token.endOffset(); String entity = inputSeq.substring(startPos, endPos); entities.add( new NerResults.EntityGroup( @@ -314,17 +312,7 @@ static List groupTaggedTokens(List tokens, return entities; } - static class TaggedToken { - private final DelimitedToken token; - private final IobTag tag; - private final double score; - - TaggedToken(DelimitedToken token, IobTag tag, double score) { - this.token = token; - this.tag = tag; - this.score = score; - } - + record TaggedToken(DelimitedToken token, IobTag tag, double score) { @Override public String toString() { return new StringBuilder("{").append("token:") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java index a7fe77dc67e84..43fa0d8a2488d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.core.Releasable; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; @@ -45,78 +45,37 @@ public Processor createProcessor() throws ValidationException { } public interface RequestBuilder { - @FunctionalInterface - interface 
IntToIntFunction { - int applyAsInt(int value); - } - - @FunctionalInterface - interface TokenLookupFunction { - int apply(TokenizationResult.Tokenization tokenization, int index); - } - Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException; - - Request buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException; - - static void writePaddedTokens( - String fieldName, - TokenizationResult tokenization, - int padToken, - TokenLookupFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (var inputTokens : tokenization.getTokenizations()) { - builder.startArray(); - int i = 0; - for (; i < inputTokens.getTokenIds().length; i++) { - builder.value(generator.apply(inputTokens, i)); - } - - for (; i < tokenization.getLongestSequenceLength(); i++) { - builder.value(padToken); - } - builder.endArray(); - } - builder.endArray(); - } - - static void writeNonPaddedArguments( - String fieldName, - int numTokenizations, - int longestSequenceLength, - IntToIntFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (int i = 0; i < numTokenizations; i++) { - builder.startArray(); - for (int j = 0; j < longestSequenceLength; j++) { - builder.value(generator.applyAsInt(j)); - } - builder.endArray(); - } - builder.endArray(); - } } public interface ResultProcessor { InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult); } - public interface Processor { + public abstract static class Processor implements Releasable { + + protected final NlpTokenizer tokenizer; + + public Processor(NlpTokenizer tokenizer) { + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); + } + /** * Validate the task input string. 
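* (For instance, the zero-shot classification task accepts only a single text input
* per request, as enforced when its request is built.)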
* Throws an exception if the inputs fail validation * * @param inputs Text to validate */ - void validateInputs(List inputs); + public abstract void validateInputs(List inputs); - RequestBuilder getRequestBuilder(NlpConfig config); + public abstract RequestBuilder getRequestBuilder(NlpConfig config); - ResultProcessor getResultProcessor(NlpConfig config); + public abstract ResultProcessor getResultProcessor(NlpConfig config); } public static String extractInput(TrainedModelInput input, Map doc) { @@ -132,10 +91,7 @@ public static String extractInput(TrainedModelInput input, Map d throw ExceptionsHelper.badRequestException("Input value [{}] for field [{}] must be a string", inputValue, inputField); } - public static class Request { - public final TokenizationResult tokenization; - public final BytesReference processInput; - + public record Request(TokenizationResult tokenization, BytesReference processInput) { public Request(TokenizationResult tokenization, BytesReference processInput) { this.tokenization = Objects.requireNonNull(tokenization); this.processInput = Objects.requireNonNull(processInput); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java index d613f1fc3da19..f4859405d35b9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java @@ -24,14 +24,13 @@ * A NLP processor that directly returns the PyTorch result * without any post-processing */ -public class PassThroughProcessor implements NlpTask.Processor { +public class PassThroughProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; - private final String resultsField; PassThroughProcessor(NlpTokenizer tokenizer, PassThroughConfig config) { + super(tokenizer); this.requestBuilder = tokenizer.requestBuilder(); - this.resultsField = config.getResultsField(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java index 7f8f66123cb3c..a3dd5e619e5e6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java @@ -27,13 +27,14 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class TextClassificationProcessor implements NlpTask.Processor { +public class TextClassificationProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; private final String[] classLabels; private final int numTopClasses; TextClassificationProcessor(NlpTokenizer tokenizer, TextClassificationConfig config) { + super(tokenizer); this.requestBuilder = tokenizer.requestBuilder(); List classLabels = config.getClassificationLabels(); this.classLabels = classLabels.toArray(String[]::new); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java index f035519b39c41..0671235176ad2 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java @@ -23,11 +23,12 @@ /** * A NLP processor that returns a single double[] output from the model. Assumes that only one tensor is returned via inference **/ -public class TextEmbeddingProcessor implements NlpTask.Processor { +public class TextEmbeddingProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; TextEmbeddingProcessor(NlpTokenizer tokenizer, TextEmbeddingConfig config) { + super(tokenizer); this.requestBuilder = tokenizer.requestBuilder(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java index 26db36fcd16d4..861506606e21f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java @@ -33,9 +33,8 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ZeroShotClassificationProcessor implements NlpTask.Processor { +public class ZeroShotClassificationProcessor extends NlpTask.Processor { - private final NlpTokenizer tokenizer; private final int entailmentPos; private final int contraPos; private final String[] labels; @@ -44,7 +43,7 @@ public class ZeroShotClassificationProcessor implements NlpTask.Processor { private final String resultsField; ZeroShotClassificationProcessor(NlpTokenizer tokenizer, ZeroShotClassificationConfig config) { - this.tokenizer = tokenizer; + super(tokenizer); List lowerCased = config.getClassificationLabels() .stream() .map(s -> s.toLowerCase(Locale.ROOT)) @@ -98,51 +97,25 @@ public NlpTask.ResultProcessor getResultProcessor(NlpConfig nlpConfig) { return new ResultProcessor(entailmentPos, contraPos, labelsValue, isMultiLabelValue, resultsFieldValue); } - static class RequestBuilder implements NlpTask.RequestBuilder { - - private final NlpTokenizer tokenizer; - private final String[] labels; - private final String hypothesisTemplate; - - RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) { - this.tokenizer = tokenizer; - this.labels = labels; - this.hypothesisTemplate = hypothesisTemplate; - } + record RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) implements NlpTask.RequestBuilder { @Override public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { if (inputs.size() > 1) { throw ExceptionsHelper.badRequestException("Unable to do zero-shot classification on more than one text input at a time"); } - List tokenizations = new ArrayList<>(labels.length); + List tokenizations = new ArrayList<>(labels.length); for (String label : labels) { tokenizations.add(tokenizer.tokenize(inputs.get(0), LoggerMessageFormat.format(null, hypothesisTemplate, label), truncate)); } TokenizationResult result = tokenizer.buildTokenizationResult(tokenizations); - return buildRequest(result, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException { - return 
tokenizer.requestBuilder().buildRequest(tokenizationResult, requestId); + return result.buildRequest(requestId, truncate); } } - static class ResultProcessor implements NlpTask.ResultProcessor { - private final int entailmentPos; - private final int contraPos; - private final String[] labels; - private final boolean isMultiLabel; - private final String resultsField; - - ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField) { - this.entailmentPos = entailmentPos; - this.contraPos = contraPos; - this.labels = labels; - this.isMultiLabel = isMultiLabel; - this.resultsField = resultsField; - } + record ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField) + implements + NlpTask.ResultProcessor { @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java new file mode 100644 index 0000000000000..8eba67c374aad --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java @@ -0,0 +1,272 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import com.carrotsearch.hppc.IntArrayList; +import com.ibm.icu.text.Normalizer; +import com.ibm.icu.text.Normalizer2; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.PrimitiveIterator; +import java.util.function.IntPredicate; + +/** + * Assumes that the text is already whitespace tokenized + */ +public final class BasicTokenFilter extends TokenFilter { + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + + private final CharSeqTokenTrieNode neverSplit; + private final LinkedList tokens; + private final boolean isStripAccents; + private final CharArraySet neverSplitSet; + private final Normalizer2 normalizer; + private final StringBuilder accentBuffer = new StringBuilder(); + private final IntPredicate splitOn; + + private State current; + + public static BasicTokenFilter build(boolean isTokenizeCjkChars, boolean isStripAccents, List neverSplit, TokenStream input) + throws IOException { + Analyzer analyzer = new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(); + TokenStream stream = new BasicTokenFilter( + tokenizer, + CharSeqTokenTrieNode.EMPTY, + CharArraySet.EMPTY_SET, + isStripAccents, + isTokenizeCjkChars + ); + return new 
TokenStreamComponents(tokenizer, stream); + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } + }; + CharArraySet neverSplitSet = new CharArraySet(neverSplit, false); + CharSeqTokenTrieNode neverSplitTree; + try (analyzer) { + neverSplitTree = CharSeqTokenTrieNode.build(neverSplit, c -> { + try (TokenStream ts = analyzer.tokenStream("never_split", c)) { + CharTermAttribute term = ts.addAttribute(CharTermAttribute.class); + ts.reset(); + List tokens = new ArrayList<>(); + while (ts.incrementToken()) { + tokens.add(term.toString()); + } + return tokens; + } + }); + } + return new BasicTokenFilter(input, neverSplitTree, neverSplitSet, isStripAccents, isTokenizeCjkChars); + } + + public BasicTokenFilter( + TokenStream input, + CharSeqTokenTrieNode neverSplit, + CharArraySet neverSplitSet, + boolean isStripAccents, + boolean isTokenizeCjkChars + ) { + super(input); + this.neverSplit = neverSplit; + this.neverSplitSet = neverSplitSet; + this.tokens = new LinkedList<>(); + this.isStripAccents = isStripAccents; + this.normalizer = Normalizer2.getNFDInstance(); + this.splitOn = cp -> (isTokenizeCjkChars && isCjkChar(cp)) || isPunctuationMark(cp); + } + + @Override + public void reset() throws IOException { + super.reset(); + tokens.clear(); + accentBuffer.setLength(0); + current = null; + } + + @Override + public boolean incrementToken() throws IOException { + if (tokens.isEmpty() == false) { + assert current != null; + DelimitedToken token = tokens.removeFirst(); + restoreState(current); // keep all other attributes untouched + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + return true; + } + current = null; // not really needed, but for safety + if (input.incrementToken()) { + if (isStripAccents) { + stripAccent(); + } + if (neverSplitSet.contains(termAtt)) { + return true; + } + // split punctuation and maybe cjk chars!!! 
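+ // Illustrative walk-through (hypothetical input): a whitespace token such as "can't"
+ // is split on the apostrophe into ["can", "'", "t"], and with CJK tokenization enabled
+ // each CJK ideograph becomes its own single-character token; mergeSplits() then
+ // re-joins any run of splits that matches a configured never-split token.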
+ LinkedList splits = split(); + // There is nothing to merge, nothing to store, simply return + if (splits.size() == 1) { + return true; + } + tokens.addAll(mergeSplits(splits)); + this.current = captureState(); + DelimitedToken token = tokens.removeFirst(); + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + return true; + } + return false; + } + + void stripAccent() { + accentBuffer.setLength(0); + if (normalizer.quickCheck(termAtt) != Normalizer.YES) { + normalizer.normalize(termAtt, accentBuffer); + } + IntArrayList badIndices = new IntArrayList(); + IntArrayList charCount = new IntArrayList(); + int index = 0; + for (PrimitiveIterator.OfInt it = accentBuffer.codePoints().iterator(); it.hasNext();) { + int cp = it.next(); + if (Character.getType(cp) == Character.NON_SPACING_MARK) { + badIndices.add(index); + charCount.add(Character.charCount(cp)); + } + index++; + } + if (badIndices.isEmpty()) { + return; + } + for (int i = 0; i < badIndices.size(); i++) { + int badIndex = badIndices.get(i); + int count = charCount.get(i); + for (int j = 0; j < count && badIndex < accentBuffer.length(); j++) { + accentBuffer.deleteCharAt(badIndex); + } + } + termAtt.setEmpty().append(accentBuffer); + } + + private LinkedList split() { + LinkedList splits = new LinkedList<>(); + int startOffset = offsetAtt.startOffset(); + int charIndex = 0; + int lastCharSplit = 0; + for (PrimitiveIterator.OfInt it = termAtt.codePoints().iterator(); it.hasNext();) { + int cp = it.next(); + if (splitOn.test(cp)) { + int charCount = charIndex - lastCharSplit; + if (charCount > 0) { + splits.add( + new DelimitedToken( + termAtt.subSequence(lastCharSplit, charIndex), + lastCharSplit + startOffset, + charIndex + startOffset + ) + ); + } + splits.add( + new DelimitedToken(termAtt.subSequence(charIndex, charIndex + 1), charIndex + startOffset, charIndex + 1 + startOffset) + ); + lastCharSplit = charIndex + 1; + } + charIndex += Character.charCount(cp); + } + if (lastCharSplit < termAtt.length()) { + splits.add( + new DelimitedToken(termAtt.subSequence(lastCharSplit, termAtt.length()), lastCharSplit + startOffset, offsetAtt.endOffset()) + ); + } + return splits; + } + + private LinkedList mergeSplits(LinkedList splits) { + LinkedList mergedTokens = new LinkedList<>(); + List matchingTokens = new ArrayList<>(); + CharSeqTokenTrieNode current = neverSplit; + for (DelimitedToken token : splits) { + CharSeqTokenTrieNode childNode = current.getChild(token.charSequence()); + if (childNode == null) { + if (current != neverSplit) { + mergedTokens.addAll(matchingTokens); + matchingTokens = new ArrayList<>(); + current = neverSplit; + } + childNode = current.getChild(token.charSequence()); + if (childNode == null) { + mergedTokens.add(token); + } else { + matchingTokens.add(token); + current = childNode; + } + } else if (childNode.isLeaf()) { + matchingTokens.add(token); + DelimitedToken mergedToken = DelimitedToken.mergeTokens(matchingTokens); + if (neverSplitSet.contains(mergedToken.charSequence())) { + mergedTokens.add(mergedToken); + } else { + mergedTokens.addAll(matchingTokens); + } + matchingTokens = new ArrayList<>(); + current = neverSplit; + } else { + matchingTokens.add(token); + current = childNode; + } + } + if (matchingTokens.isEmpty() == false) { + mergedTokens.addAll(matchingTokens); + } + return mergedTokens; + } + + static boolean isPunctuationMark(int codePoint) { + if ((codePoint >= 33 && codePoint <= 47) + || (codePoint >= 58 && codePoint <= 64) + || 
(codePoint >= 91 && codePoint <= 96) + || (codePoint >= 123 && codePoint <= 126)) { + return true; + } + + int category = Character.getType(codePoint); + return (category >= Character.DASH_PUNCTUATION && category <= Character.OTHER_PUNCTUATION) + || (category >= Character.INITIAL_QUOTE_PUNCTUATION && category <= Character.FINAL_QUOTE_PUNCTUATION); + } + + private static boolean isCjkChar(int codePoint) { + // https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) + Character.UnicodeBlock block = Character.UnicodeBlock.of(codePoint); + return Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E.equals(block) + || Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT.equals(block); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java deleted file mode 100644 index 561fd429422bf..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java +++ /dev/null @@ -1,381 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import joptsimple.internal.Strings; - -import java.text.Normalizer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; - -/** - * Basic tokenization of text by whitespace with optional extras: - * 1. Lower case the input - * 2. Convert to Unicode NFD - * 3. Stip accents - * 4. Surround CJK characters with ' ' - * - * Derived from - * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/src/transformers/tokenization_bert.py - */ -public class BasicTokenizer { - - private final boolean isLowerCase; - private final boolean isTokenizeCjkChars; - private final boolean isStripAccents; - private final Set neverSplitTokens; - private final TokenTrieNode neverSplitTokenTrieRoot; - - /** - * Tokenizer behaviour is controlled by the options passed here. 
- * - * @param isLowerCase If true convert the input to lowercase - * @param isTokenizeCjkChars Should CJK ideographs be tokenized - * @param isStripAccents Strip all accents - * @param neverSplit The set of tokens that should not be split - */ - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars, boolean isStripAccents, Set neverSplit) { - this.isLowerCase = isLowerCase; - this.isTokenizeCjkChars = isTokenizeCjkChars; - this.isStripAccents = isStripAccents; - this.neverSplitTokens = neverSplit; - this.neverSplitTokenTrieRoot = TokenTrieNode.build(neverSplit, this::doTokenizeString); - } - - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars, boolean isStripAccents) { - this(isLowerCase, isTokenizeCjkChars, isStripAccents, Collections.emptySet()); - } - - /** - * Tokenize CJK chars defaults to the value of {@code isLowerCase} - * when not explicitly set - * @param isLowerCase If true convert the input to lowercase - * @param isTokenizeCjkChars Should CJK ideographs be tokenized - */ - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars) { - this(isLowerCase, isTokenizeCjkChars, isLowerCase); - } - - BasicTokenizer() { - this(true, true, true); - } - - /** - * Clean the text and whitespace tokenize then process depending - * on the values of {@code lowerCase}, {@code tokenizeCjkChars}, - * {@code stripAccents} and the contents of {@code neverSplit} - * - * @param text The input text to tokenize - * @return List of tokens - */ - public List tokenize(String text) { - return mergeNeverSplitTokens(text, doTokenize(text)); - } - - private List doTokenizeString(String text) { - return doTokenize(text).stream().map(DelimitedToken::getToken).collect(Collectors.toList()); - } - - private List doTokenize(String text) { - text = cleanText(text); - if (isTokenizeCjkChars) { - text = tokenizeCjkChars(text); - } - - List tokens = whiteSpaceTokenize(text); - - List processedTokens = new ArrayList<>(tokens.size()); - for (DelimitedToken tokenRecord : tokens) { - - String tokenStr = tokenRecord.getToken(); - if (Strings.EMPTY.equals(tokenStr)) { - continue; - } - - if (isLowerCase) { - tokenStr = tokenStr.toLowerCase(Locale.ROOT); - } - if (isStripAccents) { - tokenStr = stripAccents(tokenStr); - } - processedTokens.addAll(splitOnPunctuation(new DelimitedToken(tokenRecord.getStartPos(), tokenRecord.getEndPos(), tokenStr))); - } - - return processedTokens; - } - - private List mergeNeverSplitTokens(String originalText, List tokens) { - if (neverSplitTokenTrieRoot.isLeaf()) { - return tokens; - } - List mergedTokens = new ArrayList<>(tokens.size()); - List matchingTokens = new ArrayList<>(); - TokenTrieNode current = neverSplitTokenTrieRoot; - for (DelimitedToken token : tokens) { - TokenTrieNode childNode = current.getChild(token.getToken()); - if (childNode == null) { - if (current != neverSplitTokenTrieRoot) { - mergedTokens.addAll(matchingTokens); - matchingTokens = new ArrayList<>(); - current = neverSplitTokenTrieRoot; - } - childNode = current.getChild(token.getToken()); - if (childNode == null) { - mergedTokens.add(token); - } else { - matchingTokens.add(token); - current = childNode; - } - } else if (childNode.isLeaf()) { - matchingTokens.add(token); - DelimitedToken mergedToken = DelimitedToken.mergeTokens(matchingTokens); - String originalTokenText = originalText.substring(mergedToken.getStartPos(), mergedToken.getEndPos()); - if (neverSplitTokens.contains(originalTokenText)) { - mergedTokens.add(new DelimitedToken(mergedToken.getStartPos(), 
mergedToken.getEndPos(), originalTokenText)); - } else { - mergedTokens.addAll(matchingTokens); - } - matchingTokens = new ArrayList<>(); - current = neverSplitTokenTrieRoot; - } else { - matchingTokens.add(token); - current = childNode; - } - } - return mergedTokens; - } - - public boolean isLowerCase() { - return isLowerCase; - } - - public boolean isStripAccents() { - return isStripAccents; - } - - public boolean isTokenizeCjkChars() { - return isTokenizeCjkChars; - } - - /** - * Split the input text by whitespace. - * For the returned objects {@link DelimitedToken#getStartPos()} is the - * start character index inclusive and {@link DelimitedToken#getEndPos()} - * the index exclusive. The number of whitespace characters between 2 consecutive - * {@link DelimitedToken}s is the difference between the first's {@code endPos} - * and the second's {@code startPos}. - * - * The input should be normalized via a call to {@link #cleanText(String)} - * before it is passed to this function. - * - * @param text to tokenize - * @return White space separated strings - */ - static List whiteSpaceTokenize(String text) { - var tokens = new ArrayList(); - - // whitespace at beginning - int index = 0; - while (index < text.length() && text.charAt(index) == ' ') { - index++; - } - - int tokenStart = index; - - while (index < text.length()) { - if (text.charAt(index) == ' ') { - int tokenEnd = index; - index++; - // consume trail whitespace before the next word - // or end of text - while (index < text.length() && text.charAt(index) == ' ') { - index++; - } - - tokens.add(new DelimitedToken(tokenStart, tokenEnd, text.substring(tokenStart, tokenEnd))); - tokenStart = index; - } - index++; - } - - // trailing whitespace - if (tokenStart != text.length()) { - tokens.add(new DelimitedToken(tokenStart, text.length(), text.substring(tokenStart))); - } - - return tokens; - } - - /** - * Normalize unicode text to NFD form - * "Characters are decomposed by canonical equivalence, and multiple - * combining characters are arranged in a specific order" - * from https://en.wikipedia.org/wiki/Unicode_equivalence#Normal_forms - * - * And remove non-spacing marks https://www.compart.com/en/unicode/category/Mn - * - * @param word Word to strip - * @return {@code word} normalized and stripped. 
- */ - static String stripAccents(String word) { - String normalizedString = Normalizer.normalize(word, Normalizer.Form.NFD); - - int[] codePoints = normalizedString.codePoints() - .filter(codePoint -> Character.getType(codePoint) != Character.NON_SPACING_MARK) - .toArray(); - - return new String(codePoints, 0, codePoints.length); - } - - static List splitOnPunctuation(DelimitedToken word) { - List splits = new ArrayList<>(); - int[] codePoints = word.getToken().codePoints().toArray(); - - int lastSplit = 0; - for (int i = 0; i < codePoints.length; i++) { - if (isPunctuationMark(codePoints[i])) { - int charCount = i - lastSplit; - if (charCount > 0) { - // add a new string for what has gone before - splits.add( - new DelimitedToken( - word.getStartPos() + lastSplit, - word.getStartPos() + i, - new String(codePoints, lastSplit, i - lastSplit) - ) - ); - } - splits.add(new DelimitedToken(word.getStartPos() + i, word.getStartPos() + i + 1, new String(codePoints, i, 1))); - lastSplit = i + 1; - } - } - - if (lastSplit < codePoints.length) { - splits.add( - new DelimitedToken( - word.getStartPos() + lastSplit, - word.getStartPos() + codePoints.length, - new String(codePoints, lastSplit, codePoints.length - lastSplit) - ) - ); - } - - return splits; - } - - /** - * Surrounds any CJK character with whitespace - * @param text To tokenize - * @return tokenized text - */ - static String tokenizeCjkChars(String text) { - StringBuilder sb = new StringBuilder(text.length()); - AtomicBoolean cjkCharFound = new AtomicBoolean(false); - - text.codePoints().forEach(cp -> { - if (isCjkChar(cp)) { - sb.append(' '); - sb.appendCodePoint(cp); - sb.append(' '); - cjkCharFound.set(true); - } else { - sb.appendCodePoint(cp); - } - }); - - // no change - if (cjkCharFound.get() == false) { - return text; - } - - return sb.toString(); - } - - /** - * Remove control chars and normalize white space to ' ' - * @param text Text to clean - * @return Cleaned text - */ - static String cleanText(String text) { - int[] codePoints = text.codePoints() - .filter(codePoint -> (codePoint == 0x00 || codePoint == 0xFFFD || isControlChar(codePoint)) == false) - .map(codePoint -> isWhiteSpace(codePoint) ? ' ' : codePoint) - .toArray(); - - return new String(codePoints, 0, codePoints.length); - } - - static boolean isCjkChar(int codePoint) { - // https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) - Character.UnicodeBlock block = Character.UnicodeBlock.of(codePoint); - return Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E.equals(block) - || Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT.equals(block); - } - - /** - * newline, carriage return and tab are control chars but for - * tokenization purposes they are treated as whitespace. 
- * - * @param codePoint code point - * @return is control char - */ - static boolean isControlChar(int codePoint) { - if (codePoint == '\n' || codePoint == '\r' || codePoint == '\t') { - return false; - } - int category = Character.getType(codePoint); - - return category >= Character.CONTROL && category <= Character.SURROGATE; - } - - /** - * newline, carriage return and tab are technically control chars - * but are not part of the Unicode Space Separator (Zs) group. - * For tokenization purposes they are treated as whitespace - * - * @param codePoint code point - * @return is white space - */ - static boolean isWhiteSpace(int codePoint) { - if (codePoint == '\n' || codePoint == '\r' || codePoint == '\t') { - return true; - } - return Character.getType(codePoint) == Character.SPACE_SEPARATOR; - } - - /** - * We treat all non-letter/number ASCII as punctuation. - * Characters such as "^", "$", and "`" are not in the Unicode - * Punctuation class but are treated as punctuation for consistency. - * - * @param codePoint code point - * @return true if is punctuation - */ - static boolean isPunctuationMark(int codePoint) { - if ((codePoint >= 33 && codePoint <= 47) - || (codePoint >= 58 && codePoint <= 64) - || (codePoint >= 91 && codePoint <= 96) - || (codePoint >= 123 && codePoint <= 126)) { - return true; - } - - int category = Character.getType(codePoint); - return (category >= Character.DASH_PUNCTUATION && category <= Character.OTHER_PUNCTUATION) - || (category >= Character.INITIAL_QUOTE_PUNCTUATION && category <= Character.FINAL_QUOTE_PUNCTUATION); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java new file mode 100644 index 0000000000000..87429d2bcf2eb --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + + package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.xcontent.XContentBuilder; + import org.elasticsearch.xcontent.XContentFactory; + import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; + import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; + + import java.io.IOException; + import java.util.List; + import java.util.function.Function; + import java.util.stream.IntStream; + import java.util.stream.Stream; + + public class BertTokenizationResult extends TokenizationResult { + + static final String REQUEST_ID = "request_id"; + static final String TOKENS = "tokens"; + static final String ARG1 = "arg_1"; + static final String ARG2 = "arg_2"; + static final String ARG3 = "arg_3"; + + public BertTokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) { + super(vocab, tokenizations, padTokenId); + } + + @Override + public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.field(REQUEST_ID, requestId); + writePaddedTokens(TOKENS, builder); + writeAttentionMask(ARG1, builder); + writeTokenTypeIds(ARG2, builder); + writePositionIds(ARG3, builder); + builder.endObject(); + + // BytesReference.bytes closes the builder + BytesReference jsonRequest = BytesReference.bytes(builder); + return new NlpTask.Request(this, jsonRequest); + } + + static class BertTokensBuilder implements TokensBuilder { + protected final Stream.Builder<IntStream> tokenIds; + protected final Stream.Builder<IntStream> tokenMap; + protected final boolean withSpecialTokens; + protected final int clsTokenId; + protected final int sepTokenId; + + BertTokensBuilder(boolean withSpecialTokens, int clsTokenId, int sepTokenId) { + this.withSpecialTokens = withSpecialTokens; + this.clsTokenId = clsTokenId; + this.sepTokenId = sepTokenId; + this.tokenIds = Stream.builder(); + this.tokenMap = Stream.builder(); + } + + @Override + public TokensBuilder addSequence(List<Integer> wordPieceTokenIds, List<Integer> tokenPositionMap) { + if (withSpecialTokens) { + tokenIds.add(IntStream.of(clsTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + return this; + } + + @Override + public TokensBuilder addSequencePair( + List<Integer> tokenId1s, + List<Integer> tokenMap1, + List<Integer> tokenId2s, + List<Integer> tokenMap2 + ) { + if (withSpecialTokens) { + tokenIds.add(IntStream.of(clsTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf)); + int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap)); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + return this; + } + + @Override + public Tokens build(String input, boolean truncated, List<? extends DelimitedToken> allTokens) { + return new Tokens( + input, + allTokens, + truncated, + 
tokenIds.build().flatMapToInt(Function.identity()).toArray(), + tokenMap.build().flatMapToInt(Function.identity()).toArray() + ); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java index ab0d34860c0c8..4b9b63ca57f79 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java @@ -6,12 +6,15 @@ */ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder; import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -19,9 +22,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.function.Function; -import java.util.stream.IntStream; -import java.util.stream.Stream; +import java.util.stream.Collectors; /** * Performs basic tokenization and normalization of input text @@ -41,26 +42,20 @@ public class BertTokenizer implements NlpTokenizer { public static final int SPECIAL_TOKEN_POSITION = -1; - public static final int DEFAULT_MAX_INPUT_CHARS_PER_WORD = 100; - private static final Set NEVER_SPLIT = Set.of(MASK_TOKEN); - private final WordPieceTokenizer wordPieceTokenizer; - private final List originalVocab; + private final WordPieceAnalyzer wordPieceAnalyzer; + protected final List originalVocab; // TODO Not sure this needs to be a sorted map private final SortedMap vocab; - private final boolean doLowerCase; - private final boolean doTokenizeCjKChars; - private final boolean doStripAccents; protected final boolean withSpecialTokens; - private final Set neverSplit; private final int maxSequenceLength; - private final NlpTask.RequestBuilder requestBuilder; private final String sepToken; protected final int sepTokenId; private final String clsToken; private final int clsTokenId; private final String padToken; + protected final int padTokenId; private final String maskToken; private final String unknownToken; @@ -72,7 +67,6 @@ protected BertTokenizer( boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit ) { this( @@ -83,7 +77,6 @@ protected BertTokenizer( doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, Sets.union(neverSplit, NEVER_SPLIT), SEPARATOR_TOKEN, CLASS_TOKEN, @@ -101,7 +94,6 @@ protected BertTokenizer( boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit, String sepToken, String clsToken, @@ -109,22 +101,25 @@ protected BertTokenizer( String maskToken, String unknownToken ) { - wordPieceTokenizer = new WordPieceTokenizer(vocab, unknownToken, DEFAULT_MAX_INPUT_CHARS_PER_WORD); + wordPieceAnalyzer = new WordPieceAnalyzer( + originalVocab, + new ArrayList<>(neverSplit), + doLowerCase, + doTokenizeCjKChars, + doStripAccents, + 
unknownToken + ); this.originalVocab = originalVocab; this.vocab = vocab; - this.doLowerCase = doLowerCase; - this.doTokenizeCjKChars = doTokenizeCjKChars; - this.doStripAccents = doStripAccents; this.withSpecialTokens = withSpecialTokens; - this.neverSplit = neverSplit; this.maxSequenceLength = maxSequenceLength; - this.requestBuilder = requestBuilderFactory.apply(this); if (vocab.containsKey(unknownToken) == false) { throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", unknownToken); } if (vocab.containsKey(padToken) == false) { throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", padToken); } + this.padTokenId = vocab.get(padToken); if (withSpecialTokens) { Set missingSpecialTokens = Sets.difference(Set.of(sepToken, clsToken), vocab.keySet()); @@ -186,12 +181,12 @@ public String getMaskToken() { } @Override - public TokenizationResult buildTokenizationResult(List tokenizations) { - TokenizationResult tokenizationResult = new TokenizationResult(originalVocab); - for (TokenizationResult.Tokenization tokenization : tokenizations) { - tokenizationResult.addTokenization(tokenization); - } - return tokenizationResult; + public TokenizationResult buildTokenizationResult(List tokenizations) { + return new BertTokenizationResult(originalVocab, tokenizations, vocab.get(this.padToken)); + } + + TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) { + return new BertTokenizationResult.BertTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId); } /** @@ -206,9 +201,9 @@ public TokenizationResult buildTokenizationResult(List wordPieceTokenIds = innerResult.wordPieceTokenIds; + List wordPieceTokenIds = innerResult.tokens; List tokenPositionMap = innerResult.tokenPositionMap; int numTokens = withSpecialTokens ? 
wordPieceTokenIds.size() + 2 : wordPieceTokenIds.size(); boolean isTruncated = false; @@ -227,24 +222,19 @@ public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncat ); } } - BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(wordPieceTokenIds, tokenPositionMap) - .addEndTokensIfNecessary(); - return new TokenizationResult.Tokenization( - seq, - innerResult.tokens, - isTruncated, - bertTokenizationBuilder.buildIds(), - bertTokenizationBuilder.buildMap() - ); + return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequence( + wordPieceTokenIds.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMap + ).build(seq, isTruncated, innerResult.tokens); } @Override - public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate) { + public TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate) { var innerResultSeq1 = innerTokenize(seq1); - List wordPieceTokenIdsSeq1 = innerResultSeq1.wordPieceTokenIds; + List wordPieceTokenIdsSeq1 = innerResultSeq1.tokens; List tokenPositionMapSeq1 = innerResultSeq1.tokenPositionMap; var innerResultSeq2 = innerTokenize(seq2); - List wordPieceTokenIdsSeq2 = innerResultSeq2.wordPieceTokenIds; + List wordPieceTokenIdsSeq2 = innerResultSeq2.tokens; List tokenPositionMapSeq2 = innerResultSeq2.tokenPositionMap; if (withSpecialTokens == false) { throw new IllegalArgumentException("Unable to do sequence pair tokenization without special tokens"); @@ -298,22 +288,21 @@ public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokeni ); } } - BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(wordPieceTokenIdsSeq1, tokenPositionMapSeq1) - .addTokens(wordPieceTokenIdsSeq2, tokenPositionMapSeq2) - .addEndTokensIfNecessary(); - List tokens = new ArrayList<>(innerResultSeq1.tokens); + List tokens = new ArrayList<>(innerResultSeq1.tokens); tokens.addAll(innerResultSeq2.tokens); - return new TokenizationResult.Tokenization( - seq1 + seq2, - tokens, - isTruncated, - bertTokenizationBuilder.buildIds(), - bertTokenizationBuilder.buildMap() - ); + return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequencePair( + wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq1, + wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq2 + ).build(seq1 + seq2, isTruncated, tokens); } - protected BertTokenizationBuilder bertTokenizationBuilder() { - return new BertTokenizationBuilder(); + @Override + public NlpTask.RequestBuilder requestBuilder() { + return (inputs, requestId, truncate) -> buildTokenizationResult( + inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList()) + ).buildRequest(requestId, truncate); } protected int getNumExtraTokensForSeqPair() { @@ -321,45 +310,36 @@ protected int getNumExtraTokensForSeqPair() { } private InnerTokenization innerTokenize(String seq) { - BasicTokenizer basicTokenizer = new BasicTokenizer(doLowerCase, doTokenizeCjKChars, doStripAccents, neverSplit); - var tokenSequences = basicTokenizer.tokenize(seq); - List wordPieceTokens = new ArrayList<>(); List tokenPositionMap = new ArrayList<>(); - - for (int sourceIndex = 0; sourceIndex < tokenSequences.size(); sourceIndex++) { - String token = 
tokenSequences.get(sourceIndex).getToken(); - if (neverSplit.contains(token)) { - wordPieceTokens.add(vocab.getOrDefault(token, vocab.get(unknownToken))); - tokenPositionMap.add(sourceIndex); - } else { - List tokens = wordPieceTokenizer.tokenize(tokenSequences.get(sourceIndex)); - for (int tokenCount = 0; tokenCount < tokens.size(); tokenCount++) { - tokenPositionMap.add(sourceIndex); - } - wordPieceTokens.addAll(tokens); + try (TokenStream ts = wordPieceAnalyzer.tokenStream("input", seq)) { + ts.reset(); + PositionIncrementAttribute tokenPos = ts.addAttribute(PositionIncrementAttribute.class); + int currPos = -1; + while (ts.incrementToken()) { + currPos += tokenPos.getPositionIncrement(); + tokenPositionMap.add(currPos); } + } catch (IOException ex) { + throw new UncheckedIOException(ex); } + return new InnerTokenization(new ArrayList<>(wordPieceAnalyzer.getTokens()), tokenPositionMap); + } - return new InnerTokenization(tokenSequences, wordPieceTokens, tokenPositionMap); + @Override + public void close() { + wordPieceAnalyzer.close(); } private static class InnerTokenization { - List tokens; - List wordPieceTokenIds; + List tokens; List tokenPositionMap; - InnerTokenization(List tokens, List wordPieceTokenIds, List tokenPositionMap) { + InnerTokenization(List tokens, List tokenPositionMap) { this.tokens = tokens; - this.wordPieceTokenIds = wordPieceTokenIds; this.tokenPositionMap = tokenPositionMap; } } - @Override - public NlpTask.RequestBuilder requestBuilder() { - return requestBuilder; - } - public int getMaxSequenceLength() { return maxSequenceLength; } @@ -368,59 +348,16 @@ public static Builder builder(List vocab, Tokenization tokenization) { return new Builder(vocab, tokenization); } - protected class BertTokenizationBuilder { - Stream.Builder tokenIds; - Stream.Builder tokenMap; - int numSeq; - - BertTokenizationBuilder() { - tokenIds = Stream.builder(); - tokenMap = Stream.builder(); - if (withSpecialTokens) { - tokenIds.add(IntStream.of(clsTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - } - - BertTokenizationBuilder addTokens(List wordPieceTokenIds, List tokenPositionMap) { - if (numSeq > 0 && withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); - tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); - numSeq++; - return this; - } - - BertTokenizationBuilder addEndTokensIfNecessary() { - if (withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - return this; - } - - int[] buildIds() { - return tokenIds.build().flatMapToInt(Function.identity()).toArray(); - } - - int[] buildMap() { - return tokenMap.build().flatMapToInt(Function.identity()).toArray(); - } - } - public static class Builder { protected final List originalVocab; protected final SortedMap vocab; - protected boolean doLowerCase = false; + protected boolean doLowerCase; protected boolean doTokenizeCjKChars = true; - protected boolean withSpecialTokens = true; + protected boolean withSpecialTokens; protected int maxSequenceLength; protected Boolean doStripAccents = null; protected Set neverSplit; - protected Function requestBuilderFactory = BertRequestBuilder::new; protected Builder(List vocab, Tokenization tokenization) { this.originalVocab = vocab; @@ -473,11 +410,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) { return this; } - public Builder 
setRequestBuilderFactory(Function requestBuilderFactory) { - this.requestBuilderFactory = requestBuilderFactory; - return this; - } - public BertTokenizer build() { // if not set strip accents defaults to the value of doLowerCase if (doStripAccents == null) { @@ -496,7 +428,6 @@ public BertTokenizer build() { doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, neverSplit ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java similarity index 58% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java index a6716a9580372..7b5514b692285 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java @@ -7,22 +7,20 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.apache.lucene.analysis.CharArrayMap; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.function.Function; -class TokenTrieNode { +public record CharSeqTokenTrieNode(CharArrayMap children) { - private static final String EMPTY_STRING = ""; + public static final CharSeqTokenTrieNode EMPTY = new CharSeqTokenTrieNode(new CharArrayMap<>(0, false)); - private final Map children; - - private TokenTrieNode(Map children) { + public CharSeqTokenTrieNode(CharArrayMap children) { this.children = Objects.requireNonNull(children); } @@ -30,8 +28,18 @@ boolean isLeaf() { return children.isEmpty(); } + public void clear() { + if (isLeaf()) { + return; + } + for (CharSeqTokenTrieNode c : children.values()) { + c.clear(); + } + children.clear(); + } + @Nullable - TokenTrieNode getChild(String token) { + CharSeqTokenTrieNode getChild(CharSequence token) { return children.get(token); } @@ -39,7 +47,7 @@ private void insert(List tokens) { if (tokens.isEmpty()) { return; } - TokenTrieNode currentNode = this; + CharSeqTokenTrieNode currentNode = this; int currentTokenIndex = 0; // find leaf @@ -49,15 +57,16 @@ private void insert(List tokens) { } // add rest of tokens as new nodes while (currentTokenIndex < tokens.size()) { - TokenTrieNode childNode = new TokenTrieNode(new HashMap<>()); + CharSeqTokenTrieNode childNode = new CharSeqTokenTrieNode(new CharArrayMap<>(1, false)); currentNode.children.put(tokens.get(currentTokenIndex), childNode); currentNode = childNode; currentTokenIndex++; } } - static TokenTrieNode build(Collection tokens, Function> tokenizeFunction) { - TokenTrieNode root = new TokenTrieNode(new HashMap<>()); + public static CharSeqTokenTrieNode build(Collection tokens, CheckedFunction, IOException> tokenizeFunction) + throws IOException { + CharSeqTokenTrieNode root = new CharSeqTokenTrieNode(new CharArrayMap<>(1, false)); for (String token : tokens) { List subTokens = tokenizeFunction.apply(token); root.insert(subTokens); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java new file mode 100644 index 0000000000000..dc27f51c520e9 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import com.carrotsearch.hppc.CharArrayList; + +import org.apache.lucene.analysis.charfilter.BaseCharFilter; + +import java.io.CharArrayReader; +import java.io.IOException; +import java.io.Reader; + +/** + * Char filter for removing control chars from a stream + */ +public class ControlCharFilter extends BaseCharFilter { + public static final String NAME = "control_char_filter"; + // TODO this is probably not ultimately necessary, keeping track of where we are in the stream + // and optimizing our replacements (like MappingCharFilter), would be faster and use less memory + private Reader transformedInput; + + public ControlCharFilter(Reader in) { + super(in); + } + + @Override + public int read(char[] cbuf, int off, int len) throws IOException { + if (transformedInput == null) { + fill(); + } + + return transformedInput.read(cbuf, off, len); + } + + @Override + public int read() throws IOException { + if (transformedInput == null) { + fill(); + } + + return transformedInput.read(); + } + + private void fill() throws IOException { + CharArrayList charArrayList = new CharArrayList(1024); + char[] temp = new char[1024]; + int totalRead = 0; + int diff = 0; + for (int cnt = input.read(temp); cnt > 0; cnt = input.read(temp)) { + int pos = 0; + while (pos < cnt) { + int start = pos; + while (start < cnt) { + if (isControlChar(temp[start]) == false) { + break; + } + start++; + } + if (start > pos) { + diff += (start - pos); + addOffCorrectMap(pos + totalRead, diff); + } + int size = 0; + while (size < (cnt - start)) { + // While the category is not a control char; read. + if (isControlChar(temp[start + size]) == false) { + size++; + } else { + break; + } + } + charArrayList.add(temp, start, size); + pos = start + size; + } + totalRead += cnt; + } + transformedInput = new CharArrayReader(charArrayList.toArray()); + } + + private static boolean isControlChar(char c) { + if (c == '\n' || c == '\r' || c == '\t') { + return false; + } + int category = Character.getType(c); + + return category >= Character.CONTROL && category <= Character.SURROGATE; + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java index 74f1121cc467f..7d385055aae5d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java @@ -13,43 +13,39 @@ public class DelimitedToken { - /** - * Merges the list of tokens. - * - * Assumes that the tokens are in order. 
- * - * @param tokens - * @return The merged token - */ - public static DelimitedToken mergeTokens(List tokens) { + static DelimitedToken mergeTokens(List tokens) { if (tokens.size() == 1) { return tokens.get(0); } - - String merged = tokens.stream().map(DelimitedToken::getToken).collect(Collectors.joining()); - return new DelimitedToken(tokens.get(0).getStartPos(), tokens.get(tokens.size() - 1).getEndPos(), merged); + int startOffSet = tokens.get(0).startOffset; + int endOffset = tokens.get(tokens.size() - 1).endOffset; + return new DelimitedToken( + tokens.stream().map(DelimitedToken::charSequence).map(CharSequence::toString).collect(Collectors.joining()), + startOffSet, + endOffset + ); } - private final int startPos; - private final int endPos; - private final String token; + private final CharSequence charSequence; + private final int startOffset; + private final int endOffset; - DelimitedToken(int startPos, int endPos, String token) { - this.startPos = startPos; - this.endPos = endPos; - this.token = token; + public DelimitedToken(CharSequence charSequence, int startOffset, int endOffset) { + this.charSequence = charSequence; + this.startOffset = startOffset; + this.endOffset = endOffset; } - public int getStartPos() { - return startPos; + public CharSequence charSequence() { + return charSequence; } - public int getEndPos() { - return endPos; + public int startOffset() { + return startOffset; } - public String getToken() { - return token; + public int endOffset() { + return endOffset; } @Override @@ -57,16 +53,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DelimitedToken that = (DelimitedToken) o; - return startPos == that.startPos && endPos == that.endPos && Objects.equals(token, that.token); + return startOffset == that.startOffset && endOffset == that.endOffset && Objects.equals(charSequence, that.charSequence); } @Override public int hashCode() { - return Objects.hash(startPos, endPos, token); - } - - @Override - public String toString() { - return "{" + "startPos=" + startPos + ", endPos=" + endPos + ", token=" + token + '}'; + return Objects.hash(charSequence, startOffset, endOffset); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java new file mode 100644 index 0000000000000..44cd29309f648 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + + package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.xcontent.XContentBuilder; + import org.elasticsearch.xcontent.XContentFactory; + import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; + import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; + + import java.io.IOException; + import java.util.List; + import java.util.stream.IntStream; + + public class MPNetTokenizationResult extends TokenizationResult { + + static final String REQUEST_ID = "request_id"; + static final String TOKENS = "tokens"; + static final String ARG1 = "arg_1"; + + public MPNetTokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) { + super(vocab, tokenizations, padTokenId); + } + + @Override + public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.field(REQUEST_ID, requestId); + writePaddedTokens(TOKENS, builder); + writeAttentionMask(ARG1, builder); + builder.endObject(); + + // BytesReference.bytes closes the builder + BytesReference jsonRequest = BytesReference.bytes(builder); + return new NlpTask.Request(this, jsonRequest); + } + + static class MPNetTokensBuilder extends BertTokenizationResult.BertTokensBuilder { + + MPNetTokensBuilder(boolean withSpecialTokens, int clsTokenId, int sepTokenId) { + super(withSpecialTokens, clsTokenId, sepTokenId); + } + + @Override + public TokensBuilder addSequencePair( + List<Integer> tokenId1s, + List<Integer> tokenMap1, + List<Integer> tokenId2s, + List<Integer> tokenMap2 + ) { + if (withSpecialTokens) { + tokenIds.add(IntStream.of(clsTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf)); + int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1); + // MPNet adds two separator tokens between sequence pairs + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId, sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap)); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + return this; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java index e2468041b8df0..5639cac1aa758 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder; import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; import java.util.Collections; @@ -16,8 +15,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.function.Function; -import java.util.stream.IntStream; +import java.util.stream.Collectors; /** * Performs basic tokenization and normalization of input text @@ -41,7 +39,6 @@ protected MPNetTokenizer( 
boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit ) { super( @@ -52,7 +49,6 @@ protected MPNetTokenizer( doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, Sets.union(neverSplit, NEVER_SPLIT), SEPARATOR_TOKEN, CLASS_TOKEN, @@ -67,25 +63,20 @@ protected int getNumExtraTokensForSeqPair() { return 4; } - @Override - protected BertTokenizationBuilder bertTokenizationBuilder() { - return new MPNetTokenizationBuilder(); + TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) { + return new MPNetTokenizationResult.MPNetTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId); } - protected class MPNetTokenizationBuilder extends BertTokenizationBuilder { - - @Override - BertTokenizationBuilder addTokens(List wordPieceTokenIds, List tokenPositionMap) { - if (numSeq > 0 && withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId, sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION)); - } - tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); - tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); - numSeq++; - return this; - } + @Override + public NlpTask.RequestBuilder requestBuilder() { + return (inputs, requestId, truncate) -> buildTokenizationResult( + inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList()) + ).buildRequest(requestId, truncate); + } + @Override + public TokenizationResult buildTokenizationResult(List tokenizations) { + return new MPNetTokenizationResult(originalVocab, tokenizations, getPadTokenId().orElseThrow()); } public static Builder mpBuilder(List vocab, Tokenization tokenization) { @@ -96,13 +87,12 @@ public static class Builder { protected final List originalVocab; protected final SortedMap vocab; - protected boolean doLowerCase = false; + protected boolean doLowerCase; protected boolean doTokenizeCjKChars = true; - protected boolean withSpecialTokens = true; + protected boolean withSpecialTokens; protected int maxSequenceLength; protected Boolean doStripAccents = null; protected Set neverSplit; - protected Function requestBuilderFactory = MPNetRequestBuilder::new; protected Builder(List vocab, Tokenization tokenization) { this.originalVocab = vocab; @@ -155,11 +145,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) { return this; } - public Builder setRequestBuilderFactory(Function requestBuilderFactory) { - this.requestBuilderFactory = requestBuilderFactory; - return this; - } - public MPNetTokenizer build() { // if not set strip accents defaults to the value of doLowerCase if (doStripAccents == null) { @@ -178,7 +163,6 @@ public MPNetTokenizer build() { doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, neverSplit ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java new file mode 100644 index 0000000000000..f78031834986e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.util.CharsRef; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class MultiCharSequence implements CharSequence { + + private final int[] lengths; + private final List sequenceList; + + public MultiCharSequence(List sequenceList) { + this.sequenceList = sequenceList; + this.lengths = new int[sequenceList.size()]; + int i = 0; + int length = 0; + for (CharSequence sequence : sequenceList) { + length += sequence.length(); + lengths[i++] = length; + } + } + + @Override + public int length() { + return lengths[lengths.length - 1]; + } + + @Override + public char charAt(int index) { + int sequenceIndex = Arrays.binarySearch(lengths, index + 1); + if (sequenceIndex < 0) { + sequenceIndex = -1 - sequenceIndex; + } + CharSequence sequence = sequenceList.get(sequenceIndex); + if (sequenceIndex == 0) { + return sequence.charAt(index); + } + return sequence.charAt(index - lengths[sequenceIndex - 1]); + } + + @Override + public CharSequence subSequence(int start, int end) { + if (start == 0 && end >= length()) { + return this; + } + if (start == end) { + return new CharsRef(CharsRef.EMPTY_CHARS, 0, 0); + } + + int startIndex = Arrays.binarySearch(lengths, start); + if (startIndex < 0) { + startIndex = -1 - startIndex; + } + int endIndex = Arrays.binarySearch(lengths, end); + if (endIndex < 0) { + endIndex = -1 - endIndex; + } + if (endIndex > lengths.length - 1) { + endIndex = lengths.length - 1; + } + if (startIndex == endIndex) { + if (startIndex == 0) { + return sequenceList.get(startIndex).subSequence(start, end); + } else { + return sequenceList.get(startIndex).subSequence(start - lengths[startIndex - 1], end - lengths[startIndex - 1]); + } + } + List sequences = new ArrayList<>((endIndex - startIndex) + 1); + if (startIndex == 0) { + sequences.add(sequenceList.get(startIndex).subSequence(start, sequenceList.get(startIndex).length())); + } else { + sequences.add(sequenceList.get(startIndex).subSequence(start - lengths[startIndex - 1], sequenceList.get(startIndex).length())); + } + if (endIndex - startIndex > 1) { + sequences.addAll(sequenceList.subList(startIndex + 1, endIndex)); + } + sequences.add(sequenceList.get(endIndex).subSequence(0, end - lengths[endIndex - 1])); + return new MultiCharSequence(sequences); + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + for (CharSequence sequence : sequenceList) { + builder.append(sequence); + } + return builder.toString(); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java index 59dbb616f7fea..7eab8dfcf8f50 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java @@ -7,12 +7,11 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.elasticsearch.core.Releasable; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; 
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder; -import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder; import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; @@ -22,13 +21,13 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.VOCABULARY; -public interface NlpTokenizer { +public interface NlpTokenizer extends Releasable { - TokenizationResult buildTokenizationResult(List tokenizations); + TokenizationResult buildTokenizationResult(List tokenizations); - TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncate truncate); + TokenizationResult.Tokens tokenize(String seq, Tokenization.Truncate truncate); - TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate); + TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate); NlpTask.RequestBuilder requestBuilder(); @@ -44,10 +43,10 @@ static NlpTokenizer build(Vocabulary vocabulary, Tokenization params) { ExceptionsHelper.requireNonNull(params, TOKENIZATION); ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY); if (params instanceof BertTokenization) { - return BertTokenizer.builder(vocabulary.get(), params).setRequestBuilderFactory(BertRequestBuilder::new).build(); + return BertTokenizer.builder(vocabulary.get(), params).build(); } if (params instanceof MPNetTokenization) { - return MPNetTokenizer.mpBuilder(vocabulary.get(), params).setRequestBuilderFactory(MPNetRequestBuilder::new).build(); + return MPNetTokenizer.mpBuilder(vocabulary.get(), params).build(); } throw new IllegalArgumentException("unknown tokenization type [" + params.getName() + "]"); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java index 862be3c43bf67..30ceb1c437a51 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java @@ -7,93 +7,150 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; -import java.util.ArrayList; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; + +import java.io.IOException; import java.util.List; +import java.util.OptionalInt; +import java.util.stream.IntStream; -public class TokenizationResult { +public abstract class TokenizationResult { + public static final int SPECIAL_TOKEN_POSITION = -1; private final List vocab; - private final List tokenizations = new ArrayList<>(); - private int maxLength; + private final List tokens; + private final int maxLength; + private final int padTokenId; - public TokenizationResult(List vocab) { + protected TokenizationResult(List vocab, List tokenizations, int padTokenId) { this.vocab = vocab; - this.maxLength = -1; + this.tokens = tokenizations; + this.padTokenId = padTokenId; + int max = 0; + for (Tokens tokenization : tokenizations) { + max = Math.max(tokenization.tokenIds.length, max); + } + this.maxLength = max; } - public boolean 
anyTruncated() { - return tokenizations.stream().anyMatch(Tokenization::isTruncated); + List getTokens() { + return tokens; } public String getFromVocab(int tokenId) { return vocab.get(tokenId); } - public List getTokenizations() { - return tokenizations; + public Tokens getTokenization(int tokenizationIndex) { + return tokens.get(tokenizationIndex); } - public void addTokenization(String input, boolean isTruncated, List tokens, int[] tokenIds, int[] tokenMap) { - maxLength = Math.max(maxLength, tokenIds.length); - tokenizations.add(new Tokenization(input, tokens, isTruncated, tokenIds, tokenMap)); + public boolean anyTruncated() { + return tokens.stream().anyMatch(Tokens::truncated); } - public void addTokenization(Tokenization tokenization) { - maxLength = Math.max(maxLength, tokenization.tokenIds.length); - tokenizations.add(tokenization); + public boolean isEmpty() { + return this.tokens.isEmpty() || this.tokens.stream().allMatch(t -> t.tokenIds.length == 0); } - public int getLongestSequenceLength() { - return maxLength; + public abstract NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException; + + protected void writePaddedTokens(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (var inputTokens : tokens) { + builder.startArray(); + + // Note, cannot write the array directly as the internal builder code writes start/end array values + for (int t : inputTokens.tokenIds) { + builder.value(t); + } + for (int i = inputTokens.tokenIds.length; i < maxLength; i++) { + builder.value(padTokenId); + } + builder.endArray(); + } + builder.endArray(); } - public static class Tokenization { + protected void writeAttentionMask(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (var inputTokens : tokens) { + builder.startArray(); + // Note, cannot write the array directly as the internal builder code writes start/end array values + for (int ignored : inputTokens.tokenIds) { + builder.value(1); + } + for (int i = inputTokens.tokenIds.length; i < maxLength; i++) { + builder.value(padTokenId); + } + builder.endArray(); + } + builder.endArray(); + } + + protected void writeTokenTypeIds(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (int i = 0; i < tokens.size(); i++) { + builder.startArray(); + for (int j = 0; j < maxLength; j++) { + builder.value(0); + } + builder.endArray(); + } + builder.endArray(); + } + + protected void writePositionIds(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (int i = 0; i < tokens.size(); i++) { + builder.startArray(); + for (int j = 0; j < maxLength; j++) { + builder.value(j); + } + builder.endArray(); + } + builder.endArray(); + } - private final String input; - private final List tokens; - private final int[] tokenIds; - private final int[] tokenMap; - private final boolean truncated; + public record Tokens(String input, List tokens, boolean truncated, int[] tokenIds, int[] tokenMap) { - public Tokenization(String input, List tokens, boolean truncated, int[] tokenIds, int[] tokenMap) { + public Tokens { assert tokenIds.length == tokenMap.length; - this.input = input; - this.tokens = tokens; - this.tokenIds = tokenIds; - this.tokenMap = tokenMap; - this.truncated = truncated; } - /** - * The integer values of the tokens} - * - * @return A list of token Ids - */ - public int[] getTokenIds() { - return tokenIds; + public 
OptionalInt getTokenIndex(int token) { + return IntStream.range(0, tokenIds.length).filter(tokenIndex -> token == tokenIds[tokenIndex]).findFirst(); } + } + interface TokensBuilder { /** - * Maps the token position to the position in the source text. - * Source words may be divided into more than one token so more - * than one token can map back to the source token - * - * @return Map of source token to + * Adds tokens to the token builder + * @param tokenIds Token ids without special tokens added + * @param tokenMap Token map without considering special tokens + * @return The builder object */ - public int[] getTokenMap() { - return tokenMap; - } - - public String getInput() { - return input; - } + TokensBuilder addSequence(List tokenIds, List tokenMap); - public List getTokens() { - return tokens; - } + /** + * Adds an encoded sequence pair to the token builder + * @param tokenId1s Sequence 1 ids + * @param tokenMap1 Sequence 1 token mappings + * @param tokenId2s Sequence 2 ids + * @param tokenMap2 Sequence 2 token map + * @return The builder object + */ + TokensBuilder addSequencePair(List tokenId1s, List tokenMap1, List tokenId2s, List tokenMap2); - public boolean isTruncated() { - return truncated; - } + /** + * Builds the token object + * @param input the original sequence input, may be a simple concatenation of a sequence pair + * @param truncated Was this truncated when tokenized + * @param allTokens All the tokens with their values and offsets + * @return A new Tokens object + */ + Tokens build(String input, boolean truncated, List allTokens); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java new file mode 100644 index 0000000000000..6f1b89f20056f --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
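The padding writers above (writePaddedTokens, writeAttentionMask) are the crux of the refactor: every request row is emitted at the length of the longest tokenization in the batch, with padTokenId filling the tail. Note that writeAttentionMask also pads with padTokenId, which yields the conventional all-zero mask tail only because BERT-style vocabularies put the pad token at id 0. A minimal sketch of that output shape using plain arrays rather than the XContentBuilder streaming the production code uses; the class name and values are illustrative:

```java
import java.util.Arrays;
import java.util.List;

// Illustration only: pads a batch of token-id sequences to the longest
// sequence, the way the refactored TokenizationResult pads each request.
public class PaddingSketch {
    public static void main(String[] args) {
        List<int[]> batch = List.of(new int[] { 12, 48, 3 }, new int[] { 7 });
        int padTokenId = 0; // BERT-style vocabularies conventionally use 0 for [PAD]
        int maxLength = batch.stream().mapToInt(a -> a.length).max().orElse(0);
        for (int[] tokenIds : batch) {
            int[] padded = Arrays.copyOf(tokenIds, maxLength);
            Arrays.fill(padded, tokenIds.length, maxLength, padTokenId);
            System.out.println(Arrays.toString(padded)); // [12, 48, 3] stays as-is; [7] becomes [7, 0, 0]
        }
    }
}
```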
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; + +import java.io.IOException; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.List; + +public class WordPieceAnalyzer extends Analyzer { + private final List vocabulary; + private final List neverSplit; + private final boolean doLowerCase; + private final boolean doTokenizeCjKChars; + private final boolean doStripAccents; + private WordPieceTokenFilter innerTokenFilter; + private final String unknownToken; + + public WordPieceAnalyzer( + List vocabulary, + List neverSplit, + boolean doLowerCase, + boolean doTokenizeCjKChars, + boolean doStripAccents, + String unknownToken + ) { + this.vocabulary = vocabulary; + this.neverSplit = neverSplit; + this.doLowerCase = doLowerCase; + this.doTokenizeCjKChars = doTokenizeCjKChars; + this.doStripAccents = doStripAccents; + this.unknownToken = unknownToken; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + try { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(512); + innerTokenFilter = WordPieceTokenFilter.build( + doLowerCase, + doTokenizeCjKChars, + doStripAccents, + neverSplit, + vocabulary, + unknownToken, + 100, + tokenizer + ); + return new TokenStreamComponents(tokenizer, innerTokenFilter); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + public List getTokens() { + if (innerTokenFilter != null) { + return innerTokenFilter.getTokenizedValues(); + } else { + return List.of(); + } + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } + + @Override + protected Reader initReaderForNormalization(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java new file mode 100644 index 0000000000000..eef885c5afb76 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
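WordPieceAnalyzer plugs into the standard Lucene Analyzer lifecycle: createComponents chains a WhitespaceTokenizer into the WordPiece filter, while initReader and initReaderForNormalization prepend ControlCharFilter so control characters never reach the tokenizer. Callers drain it through the usual reset/incrementToken/end protocol. A minimal sketch of that consumption loop, using a stock WhitespaceAnalyzer as a stand-in so it runs against vanilla Lucene:

```java
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

import java.io.IOException;

public class AnalyzerConsumerSketch {
    public static void main(String[] args) throws IOException {
        // Stand-in analyzer; a WordPieceAnalyzer instance is consumed the same way.
        try (Analyzer analyzer = new WhitespaceAnalyzer();
             TokenStream stream = analyzer.tokenStream("f", "Elasticsearch fun")) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
            stream.reset();                      // mandatory before the first incrementToken()
            while (stream.incrementToken()) {
                System.out.println(term + " [" + offset.startOffset() + "," + offset.endOffset() + ")");
            }
            stream.end();                        // records the final offset state
        }
    }
}
```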
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.CharArrayMap; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +public final class WordPieceTokenFilter extends TokenFilter { + private final LinkedList tokens; + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); + private static final CharSequence CONTINUATION = "##"; + + private State current; + private final CharArraySet neverSplit; + private final CharArrayMap vocabulary; + private final List tokenizedValues; + private final int maxInputCharsPerWord; + private final int tokenizedUnknown; + private final CharSequence unknownToken; + + public static WordPieceTokenFilter build( + boolean isLowerCase, + boolean isTokenizeCjkChars, + boolean isStripAccents, + List neverSplit, + List dictionary, + String unknownToken, + int maxInputCharsPerWord, + TokenStream input + ) throws IOException { + CharArrayMap vocabMap = new CharArrayMap<>(dictionary.size(), isLowerCase); + int i = 0; + for (var word : dictionary) { + vocabMap.put(word, i++); + } + input = BasicTokenFilter.build(isTokenizeCjkChars, isStripAccents, neverSplit, input); + return new WordPieceTokenFilter(input, new CharArraySet(neverSplit, isLowerCase), vocabMap, unknownToken, maxInputCharsPerWord); + } + + public WordPieceTokenFilter( + TokenStream input, + CharArraySet neverSplit, + CharArrayMap vocabulary, + CharSequence unknownToken, + int maxInputCharsPerWord + ) { + super(input); + this.tokens = new LinkedList<>(); + this.neverSplit = neverSplit; + this.vocabulary = vocabulary; + this.tokenizedValues = new ArrayList<>(); + if (vocabulary.containsKey(unknownToken) == false) { + throw new IllegalArgumentException( + "provided vocabulary does not contain the unknown token of [" + unknownToken.toString() + "]" + ); + } + this.unknownToken = unknownToken; + this.tokenizedUnknown = vocabulary.get(unknownToken); + this.maxInputCharsPerWord = maxInputCharsPerWord; + } + + public List getTokenizedValues() { + return tokenizedValues; + } + + @Override + public void reset() throws IOException { + super.reset(); + tokens.clear(); + tokenizedValues.clear(); + current = null; + } + + @Override + public boolean incrementToken() throws IOException { + // TODO seems like this + lowercase + tokenize cjk + wordpiece could all be the same thing.... 
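+ // Sub-word tokens buffered from the previous word are drained first, one per call, with a + // zero position increment; restoring the state captured when the whole word was read keeps + // the offsets spanning the original word. Only then is the next word pulled from upstream.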
+ if (tokens.isEmpty() == false) { + assert current != null; + WordPieceToken token = tokens.removeFirst(); + restoreState(current); // keep all other attributes untouched + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + posIncAtt.setPositionIncrement(0); + return true; + } + + current = null; // not really needed, but for safety + if (input.incrementToken()) { + if (neverSplit.contains(termAtt)) { + Integer maybeTokenized = vocabulary.get(termAtt); + tokenizedValues.add( + new WordPieceToken( + termAtt.toString(), + Objects.requireNonNullElse(maybeTokenized, tokenizedUnknown), + offsetAtt.startOffset(), + offsetAtt.endOffset() + ) + ); + return true; + } + if (termAtt.length() > maxInputCharsPerWord) { + tokenizedValues.add(new WordPieceToken(unknownToken, tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset())); + termAtt.setEmpty().append(unknownToken); + return true; + } + + boolean isBad = false; + int start = 0; + int length = termAtt.length(); + while (start < length) { + int end = length; + CharSequence currentValidSubStr = null; + + while (start < end) { + CharSequence subStr; + if (start > 0) { + subStr = new MultiCharSequence(List.of(CONTINUATION, termAtt.subSequence(start, end))); + } else { + subStr = termAtt.subSequence(start, end); + } + + if (vocabulary.containsKey(subStr)) { + currentValidSubStr = subStr; + break; + } + end--; + } + + if (currentValidSubStr == null) { + isBad = true; + break; + } + int encoding = vocabulary.get(currentValidSubStr); + WordPieceToken t = new WordPieceToken(currentValidSubStr, encoding, offsetAtt.startOffset(), offsetAtt.endOffset()); + tokenizedValues.add(t); + tokens.add(t); + start = end; + } + + if (isBad) { + tokens.clear(); + WordPieceToken t = new WordPieceToken(unknownToken, tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset()); + tokenizedValues.add(t); + termAtt.setEmpty().append(unknownToken); + } else { + current = captureState(); + WordPieceToken token = tokens.removeFirst(); + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + } + return true; + } + return false; + } + + public static class WordPieceToken extends DelimitedToken implements CharSequence { + public final int encoding; + + WordPieceToken(CharSequence sequence, int encoding, int startOffset, int endOffset) { + super(sequence, startOffset, endOffset); + this.encoding = encoding; + } + + public int getEncoding() { + return this.encoding; + } + + @Override + public int length() { + return charSequence().length(); + } + + @Override + public char charAt(int index) { + return charSequence().charAt(index); + } + + @Override + public CharSequence subSequence(int start, int end) { + return charSequence().subSequence(start, end); + } + + @Override + public String toString() { + return charSequence().toString(); + } + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java deleted file mode 100644 index b50e70f85f12a..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
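The loop above is classic greedy longest-match-first WordPiece segmentation, the same algorithm the deleted WordPieceTokenizer below implemented over String rather than CharSequence: repeatedly take the longest prefix (with a "##" continuation marker after the first piece) that exists in the vocabulary, and fall back to the unknown token if any span cannot be matched. A self-contained sketch with a toy vocabulary; the class name and values are illustrative:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Greedy longest-match-first WordPiece, the algorithm shared by the old
// WordPieceTokenizer and the new WordPieceTokenFilter.
public class WordPieceSketch {
    static List<String> wordPiece(String word, Map<String, Integer> vocab, String unknown) {
        List<String> pieces = new ArrayList<>();
        int start = 0;
        while (start < word.length()) {
            int end = word.length();
            String match = null;
            while (start < end) { // shrink the candidate from the right until it is in the vocabulary
                String candidate = (start > 0 ? "##" : "") + word.substring(start, end);
                if (vocab.containsKey(candidate)) {
                    match = candidate;
                    break;
                }
                end--;
            }
            if (match == null) {
                return List.of(unknown); // one un-splittable span marks the whole word unknown
            }
            pieces.add(match);
            start = end;
        }
        return pieces;
    }

    public static void main(String[] args) {
        Map<String, Integer> vocab = Map.of("el", 0, "##astic", 1, "##search", 2, "[UNK]", 3);
        System.out.println(wordPiece("elasticsearch", vocab, "[UNK]")); // [el, ##astic, ##search]
    }
}
```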
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * SubWord tokenization via the Word Piece algorithm using the - * provided vocabulary. - * - * The input is split by white space and should be pre-processed - * by {@link BasicTokenizer} - */ -public class WordPieceTokenizer { - - private static final String CONTINUATION = "##"; - - private final Map vocab; - private final String unknownToken; - private final int maxInputCharsPerWord; - - /** - * @param vocab The token vocabulary - * @param unknownToken If not found in the vocabulary - * @param maxInputCharsPerWord Inputs tokens longer than this are 'unknown' - */ - public WordPieceTokenizer(Map vocab, String unknownToken, int maxInputCharsPerWord) { - this.vocab = vocab; - this.unknownToken = unknownToken; - this.maxInputCharsPerWord = maxInputCharsPerWord; - } - - /** - * Wordpiece tokenize the input text. - * - * @param token Word to tokenize - * @return List of token IDs - */ - public List tokenize(DelimitedToken token) { - - if (token.getToken().length() > maxInputCharsPerWord) { - assert vocab.containsKey(unknownToken); - return Collections.singletonList(vocab.get(unknownToken)); - } - - List output = new ArrayList<>(); - boolean isBad = false; - int start = 0; - int length = token.getToken().length(); - while (start < length) { - int end = length; - - String currentValidSubStr = null; - - while (start < end) { - String subStr; - if (start > 0) { - subStr = CONTINUATION + token.getToken().substring(start, end); - } else { - subStr = token.getToken().substring(start, end); - } - - if (vocab.containsKey(subStr)) { - currentValidSubStr = subStr; - break; - } - - end--; - } - - if (currentValidSubStr == null) { - isBad = true; - break; - } - - output.add(vocab.get(currentValidSubStr)); - - start = end; - } - - if (isBad) { - return Collections.singletonList(vocab.get(unknownToken)); - } else { - return output; - } - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java index 0ac37ecc633b7..75cb77fb12b05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java @@ -91,14 +91,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REQUEST_ID.getPreferredName(), requestId); if (inference != null) { builder.startArray(INFERENCE.getPreferredName()); - for (int i = 0; i < inference.length; i++) { + for (double[][] doubles : inference) { builder.startArray(); for (int j = 0; j < inference[0].length; j++) { - builder.startArray(); - for (int k = 0; k < inference[0][0].length; k++) { - builder.value(inference[i][j][k]); - } - builder.endArray(); + builder.value(doubles[j]); } builder.endArray(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java index 1c7d34f83b06b..c5b1d0cb30c82 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java @@ -217,7 +217,7 @@ public PersistentTasksCustomMetadata.Assignment selectNode( reason = createReason( jobId, nodeNameAndMlAttributes(node), - "This node is not providing accurate information to determine is load by memory." + "This node is not providing accurate information to determine its load by memory." ); logger.trace(reason); reasons.put(node.getName(), reason); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java index 143df0be852f6..15fafd3b5f220 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java @@ -9,8 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import java.util.Objects; @@ -25,7 +25,10 @@ public class NodeLoad { private final boolean useMemory; private final String error; private final long numAssignedJobs; - private final long assignedJobMemory; + private final long assignedNativeCodeOverheadMemory; + private final long assignedAnomalyDetectorMemory; + private final long assignedDataFrameAnalyticsMemory; + private final long assignedNativeInferenceMemory; private final long numAllocatingJobs; NodeLoad( @@ -35,7 +38,10 @@ public class NodeLoad { boolean useMemory, String error, long numAssignedJobs, - long assignedJobMemory, + long assignedNativeCodeOverheadMemory, + long assignedAnomalyDetectorMemory, + long assignedDataFrameAnalyticsMemory, + long assignedNativeInferenceMemory, long numAllocatingJobs ) { this.maxMemory = maxMemory; @@ -44,7 +50,10 @@ public class NodeLoad { this.useMemory = useMemory; this.error = error; this.numAssignedJobs = numAssignedJobs; - this.assignedJobMemory = assignedJobMemory; + this.assignedNativeCodeOverheadMemory = assignedNativeCodeOverheadMemory; + this.assignedAnomalyDetectorMemory = assignedAnomalyDetectorMemory; + this.assignedDataFrameAnalyticsMemory = assignedDataFrameAnalyticsMemory; + this.assignedNativeInferenceMemory = assignedNativeInferenceMemory; this.numAllocatingJobs = numAllocatingJobs; } @@ -56,10 +65,39 @@ public long getNumAssignedJobs() { } /** - * @return The total memory in bytes used by the assigned jobs. + * @return The total memory in bytes used by all assigned jobs. */ public long getAssignedJobMemory() { - return assignedJobMemory; + return assignedNativeCodeOverheadMemory + assignedAnomalyDetectorMemory + assignedDataFrameAnalyticsMemory + + assignedNativeInferenceMemory; + } + + /** + * @return The native code overhead, if any, for native processes on this node. + */ + public long getAssignedNativeCodeOverheadMemory() { + return assignedNativeCodeOverheadMemory; + } + + /** + * @return The total memory in bytes used by the assigned anomaly detectors. + */ + public long getAssignedAnomalyDetectorMemory() { + return assignedAnomalyDetectorMemory; + } + + /** + * @return The total memory in bytes used by the assigned data frame analytics jobs. 
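Splitting the single assignedJobMemory counter into four buckets (native code overhead, anomaly detectors, data frame analytics, native inference) leaves the externally visible arithmetic unchanged: getAssignedJobMemory() is now the sum of the buckets, and free memory is still clamped at zero. A toy illustration of that arithmetic; all byte values here are made up:

```java
// Toy arithmetic mirroring NodeLoad.getAssignedJobMemory()/getFreeMemory();
// the byte values are invented for illustration.
public class NodeLoadArithmetic {
    public static void main(String[] args) {
        long maxMemory = 1_073_741_824L;       // 1 GiB usable for ML on the node
        long nativeCodeOverhead = 31_457_280L; // ~30 MiB, counted once if any native process runs
        long anomalyDetectors = 268_435_456L;  // 256 MiB
        long dataFrameAnalytics = 134_217_728L; // 128 MiB
        long nativeInference = 536_870_912L;   // 512 MiB
        long assigned = nativeCodeOverhead + anomalyDetectors + dataFrameAnalytics + nativeInference;
        long free = Math.max(maxMemory - assigned, 0L); // clamped, never negative
        System.out.println("assigned=" + assigned + " free=" + free);
    }
}
```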
+ */ + public long getAssignedDataFrameAnalyticsMemory() { + return assignedDataFrameAnalyticsMemory; + } + + /** + * @return The total memory in bytes used by the assigned native inference processes. + */ + public long getAssignedNativeInferenceMemory() { + return assignedNativeInferenceMemory; } /** @@ -94,7 +132,7 @@ public String getNodeId() { * @return The available memory on the node */ public long getFreeMemory() { - return Math.max(maxMemory - assignedJobMemory, 0L); + return Math.max(maxMemory - getAssignedJobMemory(), 0L); } /** @@ -128,7 +166,10 @@ public boolean equals(Object o) { && maxJobs == nodeLoad.maxJobs && useMemory == nodeLoad.useMemory && numAssignedJobs == nodeLoad.numAssignedJobs - && assignedJobMemory == nodeLoad.assignedJobMemory + && assignedNativeCodeOverheadMemory == nodeLoad.assignedNativeCodeOverheadMemory + && assignedAnomalyDetectorMemory == nodeLoad.assignedAnomalyDetectorMemory + && assignedDataFrameAnalyticsMemory == nodeLoad.assignedDataFrameAnalyticsMemory + && assignedNativeInferenceMemory == nodeLoad.assignedNativeInferenceMemory && numAllocatingJobs == nodeLoad.numAllocatingJobs && Objects.equals(nodeId, nodeLoad.nodeId) && Objects.equals(error, nodeLoad.error); @@ -136,7 +177,19 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(maxMemory, maxJobs, nodeId, useMemory, error, numAssignedJobs, assignedJobMemory, numAllocatingJobs); + return Objects.hash( + maxMemory, + maxJobs, + nodeId, + useMemory, + error, + numAssignedJobs, + assignedNativeCodeOverheadMemory, + assignedAnomalyDetectorMemory, + assignedDataFrameAnalyticsMemory, + assignedNativeInferenceMemory, + numAllocatingJobs + ); } public static Builder builder(String nodeId) { @@ -154,7 +207,10 @@ public static class Builder { private boolean useMemory; private String error; private long numAssignedJobs; - private long assignedJobMemory; + private long assignedNativeCodeOverheadMemory; + private long assignedAnomalyDetectorMemory; + private long assignedDataFrameAnalyticsMemory; + private long assignedNativeInferenceMemory; private long numAllocatingJobs; public Builder(NodeLoad nodeLoad) { @@ -164,7 +220,10 @@ public Builder(NodeLoad nodeLoad) { this.useMemory = nodeLoad.useMemory; this.error = nodeLoad.error; this.numAssignedJobs = nodeLoad.numAssignedJobs; - this.assignedJobMemory = nodeLoad.assignedJobMemory; + this.assignedNativeCodeOverheadMemory = nodeLoad.assignedNativeCodeOverheadMemory; + this.assignedAnomalyDetectorMemory = nodeLoad.assignedAnomalyDetectorMemory; + this.assignedDataFrameAnalyticsMemory = nodeLoad.assignedDataFrameAnalyticsMemory; + this.assignedNativeInferenceMemory = nodeLoad.assignedNativeInferenceMemory; this.numAllocatingJobs = nodeLoad.numAllocatingJobs; } @@ -173,7 +232,7 @@ public Builder(String nodeId) { } public long getFreeMemory() { - return Math.max(maxMemory - assignedJobMemory, 0L); + return Math.max(maxMemory - assignedAnomalyDetectorMemory, 0L); } public int remainingJobs() { @@ -217,8 +276,23 @@ public Builder incNumAssignedJobs() { return this; } - public Builder incAssignedJobMemory(long assignedJobMemory) { - this.assignedJobMemory += assignedJobMemory; + public Builder incAssignedNativeCodeOverheadMemory(long assignedNativeCodeOverheadMemory) { + this.assignedNativeCodeOverheadMemory += assignedNativeCodeOverheadMemory; + return this; + } + + public Builder incAssignedAnomalyDetectorMemory(long assignedAnomalyDetectorMemory) { + this.assignedAnomalyDetectorMemory += assignedAnomalyDetectorMemory; + 
return this; + } + + public Builder incAssignedDataFrameAnalyticsMemory(long assignedDataFrameAnalyticsMemory) { + this.assignedDataFrameAnalyticsMemory += assignedDataFrameAnalyticsMemory; + return this; + } + + public Builder incAssignedNativeInferenceMemory(long assignedNativeInferenceMemory) { + this.assignedNativeInferenceMemory += assignedNativeInferenceMemory; return this; } @@ -235,19 +309,36 @@ void addTask(String taskName, String taskId, boolean isAllocating, MlMemoryTrack Long jobMemoryRequirement = memoryTracker.getJobMemoryRequirement(taskName, taskId); if (jobMemoryRequirement == null) { useMemory = false; - logger.debug( - () -> new ParameterizedMessage( - "[{}] memory requirement was not available. Calculating load by number of assigned jobs.", - taskId - ) - ); + logger.debug("[{}] task memory requirement was not available.", taskId); } else { - assignedJobMemory += jobMemoryRequirement; + switch (taskName) { + case MlTasks.JOB_TASK_NAME, MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME -> assignedAnomalyDetectorMemory += + jobMemoryRequirement; + case MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME -> assignedDataFrameAnalyticsMemory += jobMemoryRequirement; + default -> { + assert false : "ML memory-requiring task name not handled: " + taskName; + // If this ever happens in production then this is better than nothing, but + // hopefully the assertion will mean we pick up any omission in testing + assignedAnomalyDetectorMemory += jobMemoryRequirement; + } + } } } public NodeLoad build() { - return new NodeLoad(maxMemory, maxJobs, nodeId, useMemory, error, numAssignedJobs, assignedJobMemory, numAllocatingJobs); + return new NodeLoad( + maxMemory, + maxJobs, + nodeId, + useMemory, + error, + numAssignedJobs, + assignedNativeCodeOverheadMemory, + assignedAnomalyDetectorMemory, + assignedDataFrameAnalyticsMemory, + assignedNativeInferenceMemory, + numAllocatingJobs + ); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java index 2a8ef46d8e5c9..eed33e77a8e2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java @@ -88,6 +88,11 @@ public NodeLoad detectNodeLoad( } updateLoadGivenTasks(nodeLoad, persistentTasks); updateLoadGivenModelAllocations(nodeLoad, allocationMetadata); + // if any processes are running then the native code will be loaded, but shared between all processes, + // so increase the total memory usage to account for this + if (nodeLoad.getNumAssignedJobs() > 0) { + nodeLoad.incAssignedNativeCodeOverheadMemory(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes()); + } return nodeLoad.build(); } @@ -105,12 +110,6 @@ private void updateLoadGivenTasks(NodeLoad.Builder nodeLoad, PersistentTasksCust nodeLoad.addTask(task.getTaskName(), taskParams.getMlId(), state.isAllocating(), mlMemoryTracker); } } - - // if any jobs are running then the native code will be loaded, but shared between all jobs, - // so increase the total memory usage of the assigned jobs to account for this - if (nodeLoad.getNumAssignedJobs() > 0) { - nodeLoad.incAssignedJobMemory(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes()); - } } } @@ -122,7 +121,7 @@ private void updateLoadGivenModelAllocations(NodeLoad.Builder nodeLoad, TrainedM .orElse(RoutingState.STOPPED) .consumesMemory()) { nodeLoad.incNumAssignedJobs(); - 
nodeLoad.incAssignedJobMemory(allocation.getTaskParams().estimateMemoryUsageBytes()); + nodeLoad.incAssignedNativeInferenceMemory(allocation.getTaskParams().estimateMemoryUsageBytes()); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 4ea52aec6e59e..47f250673f2bc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -15,12 +15,14 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RetryableAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -305,12 +307,16 @@ private void runJob(JobTask jobTask, JobState jobState, OpenJobAction.JobParams // This job has a running datafeed attached to it. // In order to prevent gaps in the model we revert to the current snapshot deleting intervening results. - revertToCurrentSnapshot(jobTask.getJobId(), ActionListener.wrap(response -> openJob(jobTask), e -> { - if (autodetectProcessManager.isNodeDying() == false) { - logger.error(new ParameterizedMessage("[{}] failed to revert to current snapshot", jobTask.getJobId()), e); - failTask(jobTask, "failed to revert to current snapshot"); - } - })); + RevertToCurrentSnapshotAction revertToCurrentSnapshotAction = new RevertToCurrentSnapshotAction( + jobTask, + ActionListener.wrap(response -> openJob(jobTask), e -> { + if (autodetectProcessManager.isNodeDying() == false) { + logger.error(new ParameterizedMessage("[{}] failed to revert to current snapshot", jobTask.getJobId()), e); + failTask(jobTask, "failed to revert to current snapshot"); + } + }) + ); + revertToCurrentSnapshotAction.run(); } else { openJob(jobTask); } @@ -454,51 +460,96 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener); } - private void revertToCurrentSnapshot(String jobId, ActionListener listener) { - ActionListener jobListener = ActionListener.wrap(jobResponse -> { - List jobPage = jobResponse.getResponse().results(); - // We requested a single concrete job so if it didn't exist we would get an error - assert jobPage.size() == 1; + /** + * This action reverts a job to its current snapshot if one exists, or resets the job. + * This action is retryable. As it runs when a job is relocating to another node, + * it commonly happens during rolling upgrades. During a rolling upgrade, it is + * probable that data nodes containing shards of the ML indices might not be available temporarily, + * which results in failures in the revert/reset action.
Thus, it is important to retry a few times + * so that the job manages to successfully recover without user intervention. + */ + private class RevertToCurrentSnapshotAction extends RetryableAction { + + private final JobTask jobTask; + private volatile boolean hasFailedAtLeastOnce; + + private RevertToCurrentSnapshotAction(JobTask jobTask, ActionListener listener) { + super( + logger, + client.threadPool(), + // No need to wait before first execution + TimeValue.timeValueMillis(1), + // Retry for 15 minutes. This should be enough time for at least some replicas + // to be available so that the revert and data deletion can succeed. + TimeValue.timeValueMinutes(15), + listener, + MachineLearning.UTILITY_THREAD_POOL_NAME + ); + this.jobTask = Objects.requireNonNull(jobTask); + } - String jobSnapshotId = jobPage.get(0).getModelSnapshotId(); - if (jobSnapshotId == null && isMasterNodeVersionOnOrAfter(ResetJobAction.VERSION_INTRODUCED)) { - logger.info("[{}] job has running datafeed task; resetting as no snapshot exists", jobId); - ResetJobAction.Request request = new ResetJobAction.Request(jobId); - request.setSkipJobStateValidation(true); - request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - executeAsyncWithOrigin( - client, - ML_ORIGIN, - ResetJobAction.INSTANCE, - request, - ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure) - ); - } else { - logger.info("[{}] job has running datafeed task; reverting to current snapshot", jobId); - RevertModelSnapshotAction.Request request = new RevertModelSnapshotAction.Request( - jobId, - jobSnapshotId == null ? ModelSnapshot.EMPTY_SNAPSHOT_ID : jobSnapshotId - ); - request.setForce(true); - request.setDeleteInterveningResults(true); - request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - executeAsyncWithOrigin( - client, - ML_ORIGIN, - RevertModelSnapshotAction.INSTANCE, - request, - ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure) + @Override + public void tryAction(ActionListener listener) { + ActionListener jobListener = ActionListener.wrap(jobResponse -> { + List jobPage = jobResponse.getResponse().results(); + // We requested a single concrete job so if it didn't exist we would get an error + assert jobPage.size() == 1; + + String jobSnapshotId = jobPage.get(0).getModelSnapshotId(); + if (jobSnapshotId == null && isMasterNodeVersionOnOrAfter(ResetJobAction.VERSION_INTRODUCED)) { + logger.info("[{}] job has running datafeed task; resetting as no snapshot exists", jobTask.getJobId()); + ResetJobAction.Request request = new ResetJobAction.Request(jobTask.getJobId()); + request.setSkipJobStateValidation(true); + request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + ResetJobAction.INSTANCE, + request, + ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure) + ); + } else { + logger.info("[{}] job has running datafeed task; reverting to current snapshot", jobTask.getJobId()); + RevertModelSnapshotAction.Request request = new RevertModelSnapshotAction.Request( + jobTask.getJobId(), + jobSnapshotId == null ?
ModelSnapshot.EMPTY_SNAPSHOT_ID : jobSnapshotId + ); + request.setForce(true); + request.setDeleteInterveningResults(true); + request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + RevertModelSnapshotAction.INSTANCE, + request, + ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure) + ); + } + }, error -> listener.onFailure(ExceptionsHelper.serverError("[{}] error getting job", error, jobTask.getJobId()))); + + // We need to refetch the job in order to learn what its current model snapshot is, + // as the one that exists in the task params is outdated. + GetJobsAction.Request request = new GetJobsAction.Request(jobTask.getJobId()); + request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener); + } + + @Override + public boolean shouldRetry(Exception e) { + if (jobTask.isClosing() || jobTask.isVacating()) { + return false; + } + if (hasFailedAtLeastOnce == false) { + hasFailedAtLeastOnce = true; + logger.error( + new ParameterizedMessage("[{}] error reverting job to its current snapshot; attempting retry", jobTask.getJobId()), + e ); } - }, error -> listener.onFailure(ExceptionsHelper.serverError("[{}] error getting job", error, jobId))); - - // We need to refetch the job in order to learn what is its current model snapshot - // as the one that exists in the task params is outdated. - GetJobsAction.Request request = new GetJobsAction.Request(jobId); - request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener); + return true; + } } // Exceptions that occur while the node is dying, i.e. after the JVM has received a SIGTERM, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 701abf89ae816..666ed4ce29487 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -210,6 +210,16 @@ public boolean isRecentlyRefreshed(Duration customDuration) { && localLastUpdateTime.plus(RECENT_UPDATE_THRESHOLD).plus(customDuration).isAfter(Instant.now()); } + /** + * Has the information in this object ever been refreshed? + * This method is intended for use when it's not essential to + * have the most up-to-date information, but having some + * information is necessary, for example in telemetry. + */ + public boolean isEverRefreshed() { + return isMaster && lastUpdateTime != null; + } + /** * Get the memory requirement for an anomaly detector job. * This method only works on the master node. diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java new file mode 100644 index 0000000000000..0cc76216936e5 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
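RevertToCurrentSnapshotAction leans on the RetryableAction contract: the framework calls tryAction, and on failure consults shouldRetry before scheduling another attempt with a growing delay, until the configured timeout (15 minutes here) expires. A deliberately simplified, synchronous sketch of that contract; the real class schedules attempts asynchronously on a thread pool, and all names and values below are illustrative:

```java
import java.util.function.Predicate;
import java.util.function.Supplier;

// Synchronous sketch of the retry contract RetryableAction provides:
// keep attempting until success, a non-retryable failure, or the deadline.
public class RetrySketch {
    static <T> T retry(Supplier<T> tryAction, Predicate<Exception> shouldRetry, long timeoutMillis)
        throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        long delay = 1; // start almost immediately, as the action above does
        while (true) {
            try {
                return tryAction.get();
            } catch (Exception e) {
                if (shouldRetry.test(e) == false || System.currentTimeMillis() + delay > deadline) {
                    throw e; // give up: non-retryable, or out of time
                }
                Thread.sleep(delay);
                delay *= 2; // grow the gap between attempts
            }
        }
    }

    public static void main(String[] args) throws Exception {
        int[] attempts = { 0 };
        String result = retry(() -> {
            if (++attempts[0] < 3) {
                throw new IllegalStateException("shard not ready"); // transient failure
            }
            return "reverted";
        }, e -> e instanceof IllegalStateException, 10_000);
        System.out.println(result + " after " + attempts[0] + " attempts");
    }
}
```

As in the production shouldRetry, a sensible predicate gives up immediately once the work is moot (the task is closing or vacating) and logs only the first failure.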
+ */ + +package org.elasticsearch.xpack.ml.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; + +public class RestMlMemoryAction extends BaseRestHandler { + + public static final String NODE_ID = "nodeId"; + public static final String MASTER_TIMEOUT = "master_timeout"; + public static final String TIMEOUT = "timeout"; + + @Override + public List routes() { + return List.of(new Route(GET, BASE_PATH + "memory/{" + NODE_ID + "}/_stats"), new Route(GET, BASE_PATH + "memory/_stats")); + } + + @Override + public String getName() { + return "ml_memory_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String nodeId = restRequest.param(NODE_ID); + if (Strings.isNullOrEmpty(nodeId)) { + nodeId = Metadata.ALL; + } + MlMemoryAction.Request request = new MlMemoryAction.Request(nodeId); + request.masterNodeTimeout(restRequest.paramAsTime(MASTER_TIMEOUT, request.masterNodeTimeout())); + request.timeout(restRequest.paramAsTime(TIMEOUT, request.timeout())); + return channel -> client.execute(MlMemoryAction.INSTANCE, request, new NodesResponseRestListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index 716efb04b6c40..82f7dd71cdec9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -121,7 +121,7 @@ public void setup() { nodeLoadDetector = mock(NodeLoadDetector.class); when(nodeLoadDetector.getMlMemoryTracker()).thenReturn(mlMemoryTracker); when(nodeLoadDetector.detectNodeLoad(any(), any(), anyInt(), anyInt(), anyBoolean())).thenReturn( - NodeLoad.builder("any").setUseMemory(true).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build() + NodeLoad.builder("any").setUseMemory(true).incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()).build() ); clusterService = mock(ClusterService.class); settings = Settings.EMPTY; @@ -148,7 +148,7 @@ public void testScalingEdgeCase() { NodeLoad.builder("any") .setMaxMemory(432013312) .setUseMemory(true) - .incAssignedJobMemory( + .incAssignedAnomalyDetectorMemory( (long) (168.7 * 1024 + 0.5) + (long) (1.4 * 1024 * 1024 + 0.5) + ByteSizeValue.ofMb(256).getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes() * 3 ) @@ -188,7 +188,7 @@ public void testScalingEdgeCase() { NodeLoad.builder("any") .setMaxMemory(bytesForML) .setUseMemory(true) - .incAssignedJobMemory( + .incAssignedAnomalyDetectorMemory( (long) (168.7 * 1024 + 0.5) + (long) (1.4 * 1024 * 1024 + 0.5) + ByteSizeValue.ofMb(256).getBytes() + ByteSizeValue .ofMb(128) .getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes() * 4 @@ -232,8 +232,8 @@ public void testScaleStability() { while 
(forScaleUp.getFreeMemory() > Job.PROCESS_MEMORY_OVERHEAD.getBytes()) { long jobSize = randomLongBetween(Job.PROCESS_MEMORY_OVERHEAD.getBytes(), forScaleUp.getFreeMemory()); maxJob = Math.max(jobSize, maxJob); - forScaleUp.incNumAssignedJobs().incAssignedJobMemory(jobSize); - forScaleDown.incNumAssignedJobs().incAssignedJobMemory(jobSize); + forScaleUp.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); + forScaleDown.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); } // Create jobs for scale up NodeLoad nodeLoadForScaleUp = forScaleUp.build(); @@ -244,7 +244,7 @@ public void testScaleStability() { break; } maxJob = Math.max(jobSize, maxJob); - forScaleDown.incNumAssignedJobs().incAssignedJobMemory(jobSize); + forScaleDown.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); String waitingJob = randomAlphaOfLength(10); when(mlMemoryTracker.getAnomalyDetectorJobMemoryRequirement(eq(waitingJob))).thenReturn(jobSize); waitingJobs.add(waitingJob); @@ -345,7 +345,7 @@ public void testScaleUp_withWaitingJobsAndAutoMemoryAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -460,7 +460,7 @@ public void testScaleUp_withWaitingSnapshotUpgradesAndAutoMemoryAndNoRoomInNodes NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -570,7 +570,7 @@ public void testScaleUp_withWaitingJobsAndRoomInNodes() { .setUseMemory(true) .setMaxJobs(10) .incNumAssignedJobs() - .incAssignedJobMemory(ByteSizeValue.ofMb(230).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofMb(230).getBytes()) .build(), NodeLoad.builder("not_filled").setMaxMemory(ByteSizeValue.ofMb(230).getBytes()).setMaxJobs(10).setUseMemory(true).build() ); @@ -634,7 +634,7 @@ public void testScaleUp_withWaitingJobsAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlAutoscalingDeciderService service = buildService(); @@ -701,7 +701,7 @@ public void testScaleUp_withWaitingJobs_WithFutureCapacity() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlAutoscalingDeciderService service = buildService(); @@ -763,7 +763,7 @@ public void testScaleUp_withWaitingModelAndAutoMemoryAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -809,7 +809,7 @@ public void testScaleUp_withWaitingModelsAndRoomInNodes() { .setUseMemory(true) 
.setMaxJobs(10) .incNumAssignedJobs() - .incAssignedJobMemory(ByteSizeValue.ofMb(230).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofMb(230).getBytes()) .build(), NodeLoad.builder("not_filled").setMaxMemory(ByteSizeValue.ofMb(230).getBytes()).setMaxJobs(10).setUseMemory(true).build() ); @@ -848,9 +848,18 @@ public void testScaleUp_withWaitingModelsAndRoomInNodes() { public void testScaleDown() { List nodeLoads = Arrays.asList( - NodeLoad.builder("foo").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build(), - NodeLoad.builder("bar").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build(), - NodeLoad.builder("baz").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build() + NodeLoad.builder("foo") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build(), + NodeLoad.builder("bar") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build(), + NodeLoad.builder("baz") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build() ); MlAutoscalingDeciderService service = buildService(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java similarity index 84% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java index 4087199b462d8..2d01ddefa5833 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.junit.After; import java.io.IOException; import java.util.Arrays; @@ -25,14 +26,23 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -public class BertRequestBuilderTests extends ESTestCase { +public class BertTokenizationResultTests extends ESTestCase { + + private BertTokenizer tokenizer; + + @After + public void closeIt() { + if (tokenizer != null) { + tokenizer.close(); + } + } public void testBuildRequest() throws IOException { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); + tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", 
jsonDocAsMap.get("request_id")); @@ -42,7 +52,6 @@ public void testBuildRequest() throws IOException { assertEquals(Arrays.asList(0, 1, 2, 3, 4), firstListItemFromMap("arg_3", jsonDocAsMap)); } - @SuppressWarnings("unchecked") private List firstListItemFromMap(String name, Map jsonDocAsMap) { return nthListItemFromMap(name, 0, jsonDocAsMap); } @@ -53,9 +62,9 @@ public static List nthListItemFromMap(String name, int n, Map requestBuilder.buildRequest( @@ -71,7 +80,7 @@ public void testInputTooLarge() throws IOException { ); } { - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -80,15 +89,15 @@ public void testInputTooLarge() throws IOException { @SuppressWarnings("unchecked") public void testBatchWithPadding() throws IOException { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); + tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest( List.of("Elasticsearch", "my little red car", "Godzilla day"), "request1", Tokenization.Truncate.NONE ); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java index 2020010cb7e65..a4f591ec43b36 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java @@ -14,14 +14,13 @@ import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BasicTokenizer; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.DelimitedToken; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.WordPieceTokenFilter; import org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchInferenceResult; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.OptionalInt; @@ -41,25 +40,28 @@ public void testProcessResults() { { 0, 0, 0, 0, 0, 0, 0 }, // The { 0, 0, 0, 0, 0, 0, 0 }, // capital { 0, 0, 0, 0, 0, 0, 0 }, // of - { 0.01, 0.01, 0.3, 0.1, 0.01, 0.2, 1.2 }, // MASK + { 0.01, 0.01, 0.3, 0.01, 0.2, 1.2, 0.1 }, // MASK { 0, 0, 0, 0, 0, 0, 0 }, // is { 0, 0, 0, 0, 0, 0, 
0 } // paris } }; String input = "The capital of " + BertTokenizer.MASK_TOKEN + " is Paris"; - List vocab = Arrays.asList("The", "capital", "of", BertTokenizer.MASK_TOKEN, "is", "Paris", "France"); - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List vocab = Arrays.asList("The", "capital", "of", "is", "Paris", "France", BertTokenizer.MASK_TOKEN); + List tokens = List.of(); int[] tokenMap = new int[] { 0, 1, 2, 3, 4, 5 }; - int[] tokenIds = new int[] { 0, 1, 2, 3, 4, 5 }; + int[] tokenIds = new int[] { 0, 1, 2, 6, 4, 5 }; - TokenizationResult tokenization = new TokenizationResult(vocab); - tokenization.addTokenization(input, false, tokens, tokenIds, tokenMap); + TokenizationResult tokenization = new BertTokenizationResult( + vocab, + List.of(new TokenizationResult.Tokens(input, tokens, false, tokenIds, tokenMap)), + 0 + ); BertTokenizer tokenizer = mock(BertTokenizer.class); when(tokenizer.getMaskToken()).thenReturn(BertTokenizer.MASK_TOKEN); - when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(3)); + when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(6)); String resultsField = randomAlphaOfLength(10); FillMaskResults result = (FillMaskResults) FillMaskProcessor.processResult( @@ -85,8 +87,11 @@ public void testProcessResults_GivenMissingTokens() { BertTokenizer tokenizer = mock(BertTokenizer.class); when(tokenizer.getMaskToken()).thenReturn("[MASK]"); - TokenizationResult tokenization = new TokenizationResult(Collections.emptyList()); - tokenization.addTokenization("", false, Collections.emptyList(), new int[] {}, new int[] {}); + TokenizationResult tokenization = new BertTokenizationResult( + List.of(), + List.of(new TokenizationResult.Tokens("", List.of(), false, new int[0], new int[0])), + 0 + ); PyTorchInferenceResult pyTorchResult = new PyTorchInferenceResult("1", new double[][][] { { {} } }, 0L, null); expectThrows( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java similarity index 81% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java index d848e6c9dd1b8..0a2907d3c67f6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.MPNetTokenizer; +import org.junit.After; import java.io.IOException; import java.util.Arrays; @@ -25,14 +26,22 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -public class MPNetRequestBuilderTests extends ESTestCase { +public class MPNetTokenizationResultTests extends ESTestCase { + private MPNetTokenizer tokenizer; + + @After + public void closeIt() { + if (tokenizer != null) { + tokenizer.close(); + } + } public void testBuildRequest() throws IOException { - MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); + tokenizer = 
MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); - MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertEquals("request1", jsonDocAsMap.get("request_id")); @@ -40,7 +49,6 @@ public void testBuildRequest() throws IOException { assertEquals(Arrays.asList(1, 1, 1, 1, 1), firstListItemFromMap("arg_1", jsonDocAsMap)); } - @SuppressWarnings("unchecked") private List firstListItemFromMap(String name, Map jsonDocAsMap) { return nthListItemFromMap(name, 0, jsonDocAsMap); } @@ -51,9 +59,9 @@ public static List nthListItemFromMap(String name, int n, Map requestBuilder.buildRequest( @@ -69,7 +77,7 @@ public void testInputTooLarge() throws IOException { ); } { - MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -78,15 +86,15 @@ public void testInputTooLarge() throws IOException { @SuppressWarnings("unchecked") public void testBatchWithPadding() throws IOException { - MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); + tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); - MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest( List.of("Elasticsearch", "my little red car", "Godzilla day"), "request1", Tokenization.Truncate.NONE ); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java index a29fcac6d7fef..feedcf7a7d537 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java @@ -15,18 +15,19 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NerConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BasicTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.DelimitedToken; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import 
org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchInferenceResult; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BasicTokenFilterTests.basicTokenize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -99,6 +100,50 @@ public void testProcessResults_GivenNoTokens() { assertThat(e, instanceOf(ElasticsearchStatusException.class)); } + public void testProcessResultsWithSpecialTokens() { + NerProcessor.NerResultProcessor processor = new NerProcessor.NerResultProcessor(NerProcessor.IobTag.values(), null, true); + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "el", + "##astic", + "##search", + "many", + "use", + "in", + "london", + BertTokenizer.PAD_TOKEN, + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.SEPARATOR_TOKEN, + BertTokenizer.CLASS_TOKEN + ), + new BertTokenization(true, true, null, Tokenization.Truncate.NONE) + ).build(); + TokenizationResult tokenization = tokenizer.buildTokenizationResult( + List.of(tokenizer.tokenize("Many use Elasticsearch in London", Tokenization.Truncate.NONE)) + ); + + double[][][] scores = { + { + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // cls + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // many + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // use + { 0.01, 0.01, 0, 0.01, 0, 7, 0, 3, 0 }, // el + { 0.01, 0.01, 0, 0, 0, 0, 0, 0, 0 }, // ##astic + { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // ##search + { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // in + { 0, 0, 0, 0, 0, 0, 0, 6, 0 }, // london + { 7, 0, 0, 0, 0, 0, 0, 0, 0 } // sep + } }; + NerResults result = (NerResults) processor.processResult(tokenization, new PyTorchInferenceResult("1", scores, 1L, null)); + + assertThat(result.getAnnotatedResult(), equalTo("Many use [Elasticsearch](ORG&Elasticsearch) in [London](LOC&London)")); + assertThat(result.getEntityGroups().size(), equalTo(2)); + assertThat(result.getEntityGroups().get(0).getEntity(), equalTo("elasticsearch")); + assertThat(result.getEntityGroups().get(0).getClassName(), equalTo(NerProcessor.Entity.ORG.toString())); + assertThat(result.getEntityGroups().get(1).getEntity(), equalTo("london")); + assertThat(result.getEntityGroups().get(1).getClassName(), equalTo(NerProcessor.Entity.LOC.toString())); + } + public void testProcessResults() { NerProcessor.NerResultProcessor processor = new NerProcessor.NerResultProcessor(NerProcessor.IobTag.values(), null, true); TokenizationResult tokenization = tokenize( @@ -163,9 +208,9 @@ public void testProcessResults_withIobMap() { assertThat(result.getEntityGroups().get(1).getClassName(), equalTo(NerProcessor.Entity.LOC.toString())); } - public void testGroupTaggedTokens() { + public void testGroupTaggedTokens() throws IOException { String input = "Hi Sarah Jessica, I live in Manchester and work for Elastic"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); assertThat(tokens, hasSize(12)); List taggedTokens = new ArrayList<>(); @@ -181,7 +226,7 @@ public void testGroupTaggedTokens() { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new 
NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -193,9 +238,9 @@ public void testGroupTaggedTokens() { assertThat(entityGroups.get(2).getEntity(), equalTo("Elastic")); } - public void testGroupTaggedTokens_GivenNoEntities() { + public void testGroupTaggedTokens_GivenNoEntities() throws IOException { String input = "Hi there"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(0), NerProcessor.IobTag.O, 1.0)); @@ -205,9 +250,9 @@ public void testGroupTaggedTokens_GivenNoEntities() { assertThat(entityGroups, is(empty())); } - public void testGroupTaggedTokens_GivenConsecutiveEntities() { + public void testGroupTaggedTokens_GivenConsecutiveEntities() throws IOException { String input = "Rita, Sue, and Bob too"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; @@ -217,7 +262,7 @@ public void testGroupTaggedTokens_GivenConsecutiveEntities() { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -229,9 +274,9 @@ public void testGroupTaggedTokens_GivenConsecutiveEntities() { assertThat(entityGroups.get(2).getEntity(), equalTo("Bob")); } - public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { + public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() throws IOException { String input = "FirstName SecondName, NextPerson NextPersonSecondName. 
something_else"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; @@ -240,7 +285,7 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_PER, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -251,9 +296,9 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { assertThat(entityGroups.get(2).getClassName(), equalTo("ORG")); } - public void testEntityContainsPunctuation() { + public void testEntityContainsPunctuation() throws IOException { String input = "Alexander, my name is Benjamin Trent, I work at Acme Inc.."; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; @@ -271,7 +316,7 @@ public void testEntityContainsPunctuation() { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0)); assertEquals(tokens.size(), taggedTokens.size()); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java index 0f1b03e4bea56..2d57e997c8f5b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java @@ -71,7 +71,7 @@ public void testBuildRequest() throws IOException { NlpTask.Request request = processor.getRequestBuilder(config) .buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java index 9fd5bb8f833c2..4f5d614348967 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java @@ -50,7 +50,7 @@ public void testBuildRequest() throws IOException { (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config) ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java new file mode 100644 index 0000000000000..d2b7fdcd1e559 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; + +import java.io.IOException; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Some test cases taken from + * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/tests/test_tokenization_bert.py + */ +public class BasicTokenFilterTests extends BaseTokenStreamTestCase { + + public void testNeverSplit_GivenNoLowerCase() throws IOException { + Analyzer analyzer = basicAnalyzerFromSettings(false, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "1 (return) [ Patois ", new String[] { "1", "(", "return", ")", "[", "Patois" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK].", new String[] { "Hello", "[UNK]", "." }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[UNK]", new String[] { "Hello", "-", "[UNK]" }); + assertAnalyzesToNoCharFilter( + analyzer, + " \tHeLLo!how \n Are yoU? [UNK]", + new String[] { "HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]" } + ); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]?", new String[] { "Hello", "[UNK]", "?" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]!!", new String[] { "Hello", "[UNK]", "!", "!" 
}); + assertAnalyzesToNoCharFilter(analyzer, "Hello~[UNK][UNK]", new String[] { "Hello", "~", "[UNK]", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[unk]", new String[] { "Hello", "-", "[", "unk", "]" }); + } + + public void testNeverSplit_GivenLowerCase() throws IOException { + Analyzer analyzer = basicAnalyzerFromSettings(false, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter( + analyzer, + " \tHeLLo!how \n Are yoU? [UNK]", + new String[] { "HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]" } + ); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK].", new String[] { "Hello", "[UNK]", "." }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]?", new String[] { "Hello", "[UNK]", "?" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]!!", new String[] { "Hello", "[UNK]", "!", "!" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[UNK]", new String[] { "Hello", "-", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello~[UNK][UNK]", new String[] { "Hello", "~", "[UNK]", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[unk]", new String[] { "Hello", "-", "[", "unk", "]" }); + } + + public void testSplitCJK() throws Exception { + Analyzer analyzer = basicAnalyzerFromSettings(true, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "hello ah\u535A\u63A8zz", new String[] { "hello", "ah", "\u535A", "\u63A8", "zz" }); + assertAnalyzesToNoCharFilter(analyzer, "hello world", new String[] { "hello", "world" }); + } + + public void testStripAccents() throws Exception { + Analyzer analyzer = basicAnalyzerFromSettings(true, true, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "HäLLo how are you", new String[] { "HaLLo", "how", "are", "you" }); + } + + private static void assertAnalyzesToNoCharFilter(Analyzer a, String input, String[] output) throws IOException { + assertTokenStreamContents(a.tokenStream("dummy", input), output, null, null, null, null, null, input.length()); + checkResetException(a, input); + // We don't allow the random char filter because our offsets aren't corrected appropriately due to "never_split" + // If we could figure out a way to pass "never_split" through whichever passed char_filter there was, then it would work + checkAnalysisConsistency(random(), a, false, input); + } + + public void testIsPunctuation() { + assertTrue(BasicTokenFilter.isPunctuationMark('-')); + assertTrue(BasicTokenFilter.isPunctuationMark('$')); + assertTrue(BasicTokenFilter.isPunctuationMark('`')); + assertTrue(BasicTokenFilter.isPunctuationMark('.')); + assertFalse(BasicTokenFilter.isPunctuationMark(' ')); + assertFalse(BasicTokenFilter.isPunctuationMark('A')); + assertTrue(BasicTokenFilter.isPunctuationMark('[')); + } + + public static Analyzer basicAnalyzerFromSettings(boolean isTokenizeCjkChars, boolean isStripAccents, List neverSplit) { + return new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer t = new WhitespaceTokenizer(); + try { + return new TokenStreamComponents(t, BasicTokenFilter.build(isTokenizeCjkChars, isStripAccents, neverSplit, t)); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } + }; + } + + public static List basicTokenize(Analyzer analyzer, String input) throws IOException { + try (TokenStream test = analyzer.tokenStream("test", input)) { + test.reset(); + CharTermAttribute term = 
test.addAttribute(CharTermAttribute.class); + OffsetAttribute offsetAttribute = test.addAttribute(OffsetAttribute.class); + List tokens = new ArrayList<>(); + while (test.incrementToken()) { + tokens.add(new DelimitedToken(term.toString(), offsetAttribute.startOffset(), offsetAttribute.endOffset())); + } + test.end(); + return tokens; + } + } + + public static List basicTokenize( + boolean isTokenizeCjkChars, + boolean isStripAccents, + List neverSplit, + String input + ) throws IOException { + try (Analyzer analyzer = basicAnalyzerFromSettings(isTokenizeCjkChars, isStripAccents, neverSplit)) { + return basicTokenize(analyzer, input); + } + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java deleted file mode 100644 index effe3be0da5a6..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.sameInstance; - -/** - * Some test cases taken from - * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/tests/test_tokenization_bert.py - */ -public class BasicTokenizerTests extends ESTestCase { - - public void testLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hello", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("hello")); - } - - public void testLowerCaseWithoutStripAccents() { - BasicTokenizer tokenizer = new BasicTokenizer(true, true, false); - var tokens = tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hällo", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("h\u00E9llo")); - } - - public void testLowerCaseStripAccentsDefault() { - BasicTokenizer tokenizer = new BasicTokenizer(true, true); - var tokens = tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hallo", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("hello")); - } - - public void testNoLower() { - var tokens = new BasicTokenizer(false, true, false).tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("HäLLo", "!", "how", "Are", "yoU", "?")); - } - - public void testNoLowerStripAccents() { - var tokens = new BasicTokenizer(false, true, true).tokenize(" \tHäLLo!how \n Are yoU? 
"); - assertThat(tokenStrings(tokens), contains("HaLLo", "!", "how", "Are", "yoU", "?")); - } - - public void testNeverSplit_GivenNoLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(false, false, false, Collections.singleton("[UNK]")); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? [UNK]"); - assertThat(tokenStrings(tokens), contains("HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]")); - - tokens = tokenizer.tokenize("Hello [UNK]."); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", ".")); - - tokens = tokenizer.tokenize("Hello [UNK]?"); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", "?")); - - tokens = tokenizer.tokenize("Hello [UNK]!!"); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", "!", "!")); - - tokens = tokenizer.tokenize("Hello-[UNK]"); - assertThat(tokenStrings(tokens), contains("Hello", "-", "[UNK]")); - tokens = tokenizer.tokenize("Hello~[UNK][UNK]"); - assertThat(tokenStrings(tokens), contains("Hello", "~", "[UNK]", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[UNK]")), contains("Hello", "~", "[", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[[UNK]")), contains("Hello", "~", "[", "[", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[UNK]]")), contains("Hello", "~", "[UNK]", "]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[UNK]]]")), contains("Hello", "~", "[UNK]", "]", "]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[UNK]]")), contains("Hello", "~", "[", "[UNK]", "]")); - tokens = tokenizer.tokenize("Hello-[unk]"); - assertThat(tokenStrings(tokens), contains("Hello", "-", "[", "unk", "]")); - } - - public void testNeverSplit_GivenLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(true, false, false, Collections.singleton("[UNK]")); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? 
[UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "!", "how", "are", "you", "?", "[UNK]")); - - tokens = tokenizer.tokenize("Hello [UNK]."); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", ".")); - - tokens = tokenizer.tokenize("Hello [UNK]?"); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", "?")); - - tokens = tokenizer.tokenize("Hello [UNK]!!"); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", "!", "!")); - - tokens = tokenizer.tokenize("Hello-[UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "-", "[UNK]")); - tokens = tokenizer.tokenize("Hello~[UNK][UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "~", "[UNK]", "[UNK]")); - tokens = tokenizer.tokenize("Hello-[unk]"); - assertThat(tokenStrings(tokens), contains("hello", "-", "[", "unk", "]")); - } - - public void testSplitOnPunctuation() { - var tokens = BasicTokenizer.splitOnPunctuation(new DelimitedToken(0, 3, "hi!")); - assertEquals(new DelimitedToken(0, 2, "hi"), tokens.get(0)); - assertEquals(new DelimitedToken(2, 3, "!"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("hi.")); - assertEquals(new DelimitedToken(0, 2, "hi"), tokens.get(0)); - assertEquals(new DelimitedToken(2, 3, "."), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!hi")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 3, "hi"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("don't")); - assertEquals(new DelimitedToken(0, 3, "don"), tokens.get(0)); - assertEquals(new DelimitedToken(3, 4, "'"), tokens.get(1)); - assertEquals(new DelimitedToken(4, 5, "t"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!!hi")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 2, "!"), tokens.get(1)); - assertEquals(new DelimitedToken(2, 4, "hi"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("[hi]")); - assertEquals(new DelimitedToken(0, 1, "["), tokens.get(0)); - assertEquals(new DelimitedToken(1, 3, "hi"), tokens.get(1)); - assertEquals(new DelimitedToken(3, 4, "]"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!!")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 2, "!"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("elastic’s")); - assertEquals(new DelimitedToken(0, 7, "elastic"), tokens.get(0)); - assertEquals(new DelimitedToken(7, 8, "’"), tokens.get(1)); - assertEquals(new DelimitedToken(8, 9, "s"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(new DelimitedToken(4, 13, "elastic’s")); - assertEquals(new DelimitedToken(4, 11, "elastic"), tokens.get(0)); - assertEquals(new DelimitedToken(11, 12, "’"), tokens.get(1)); - assertEquals(new DelimitedToken(12, 13, "s"), tokens.get(2)); - } - - public void testStripAccents() { - assertEquals("Hallo", BasicTokenizer.stripAccents("Hällo")); - } - - public void testTokenizeCjkChars() { - assertEquals(" \u535A \u63A8 ", BasicTokenizer.tokenizeCjkChars("\u535A\u63A8")); - - String noCjkChars = "hello"; - assertThat(BasicTokenizer.tokenizeCjkChars(noCjkChars), sameInstance(noCjkChars)); - } - - public void testTokenizeChinese() { - var tokens = new BasicTokenizer().tokenize("ah\u535A\u63A8zz"); - assertThat(tokenStrings(tokens), contains("ah", "\u535A", "\u63A8", "zz")); - } - - public void testCleanText() { - 
assertEquals("change these chars to spaces", BasicTokenizer.cleanText("change\tthese chars\rto\nspaces")); - assertEquals("filter control chars", BasicTokenizer.cleanText("\u0000filter \uFFFDcontrol chars\u0005")); - } - - public void testIsWhitespace() { - assertTrue(BasicTokenizer.isWhiteSpace(' ')); - assertTrue(BasicTokenizer.isWhiteSpace('\t')); - assertTrue(BasicTokenizer.isWhiteSpace('\r')); - assertTrue(BasicTokenizer.isWhiteSpace('\n')); - assertTrue(BasicTokenizer.isWhiteSpace('\u00A0')); - - assertFalse(BasicTokenizer.isWhiteSpace('_')); - assertFalse(BasicTokenizer.isWhiteSpace('A')); - } - - public void testIsControl() { - assertTrue(BasicTokenizer.isControlChar('\u0005')); - assertTrue(BasicTokenizer.isControlChar('\u001C')); - - assertFalse(BasicTokenizer.isControlChar('A')); - assertFalse(BasicTokenizer.isControlChar(' ')); - assertFalse(BasicTokenizer.isControlChar('\t')); - assertFalse(BasicTokenizer.isControlChar('\r')); - } - - public void testIsPunctuation() { - assertTrue(BasicTokenizer.isPunctuationMark('-')); - assertTrue(BasicTokenizer.isPunctuationMark('$')); - assertTrue(BasicTokenizer.isPunctuationMark('`')); - assertTrue(BasicTokenizer.isPunctuationMark('.')); - assertFalse(BasicTokenizer.isPunctuationMark(' ')); - assertFalse(BasicTokenizer.isPunctuationMark('A')); - assertTrue(BasicTokenizer.isPunctuationMark('[')); - } - - public void testIsCjkChar() { - assertTrue(BasicTokenizer.isCjkChar(0x3400)); - assertFalse(BasicTokenizer.isCjkChar(0x4DC0)); - - assertTrue(BasicTokenizer.isCjkChar(0xF900)); - assertFalse(BasicTokenizer.isCjkChar(0xFB00)); - - assertTrue(BasicTokenizer.isCjkChar(0x20000)); - assertFalse(BasicTokenizer.isCjkChar(0x2A6E0)); - - assertTrue(BasicTokenizer.isCjkChar(0x20000)); - assertFalse(BasicTokenizer.isCjkChar(0x2A6E0)); - - assertTrue(BasicTokenizer.isCjkChar(0x2A700)); - assertFalse(BasicTokenizer.isCjkChar(0x2CEB0)); - - assertTrue(BasicTokenizer.isCjkChar(0x2F800)); - assertFalse(BasicTokenizer.isCjkChar(0x2FA20)); - } - - public void testWhitespaceTokenize() { - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize("hello! 
how are you?"); - assertThat(delimitedTokens, hasSize(4)); - assertThat(tokenStrings(delimitedTokens), contains("hello!", "how", "are", "you?")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(0, 6, "hello!"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(7, 10, "how"))); - assertThat(delimitedTokens.get(2), equalTo(new DelimitedToken(11, 14, "are"))); - assertThat(delimitedTokens.get(3), equalTo(new DelimitedToken(15, 19, "you?"))); - } - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize(" leading whitespace"); - assertThat(delimitedTokens, hasSize(2)); - assertThat(tokenStrings(delimitedTokens), contains("leading", "whitespace")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(3, 10, "leading"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(11, 21, "whitespace"))); - } - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize("double spaced text "); - assertThat(delimitedTokens, hasSize(3)); - assertThat(tokenStrings(delimitedTokens), contains("double", "spaced", "text")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(0, 6, "double"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(8, 14, "spaced"))); - assertThat(delimitedTokens.get(2), equalTo(new DelimitedToken(16, 20, "text"))); - } - } - - private List tokenStrings(List tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); - } - - private DelimitedToken makeToken(String str) { - return new DelimitedToken(0, str.length(), str); - } - -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java index fe44e606887bc..63f3af65f667d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java @@ -47,296 +47,357 @@ public class BertTokenizerTests extends ESTestCase { BertTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList()); } public void testTokenize() { - BertTokenizer tokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); + } } public void testTokenizeLargeInputNoTruncation() { - 
BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, false, 5, Tokenization.Truncate.NONE)) - .build(); - - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> tokenizer.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.NONE) - ); - assertThat(ex.getMessage(), equalTo("Input too large. The tokenized input length [8] exceeds the maximum sequence length [5]")); - - BertTokenizer specialCharTokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, true, 5, Tokenization.Truncate.NONE) - ).build(); - - // Shouldn't throw - tokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE); - - // Should throw as special chars add two tokens - expectThrows( - ElasticsearchStatusException.class, - () -> specialCharTokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE) - ); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, 5, Tokenization.Truncate.NONE) + ).build(); + BertTokenizer specialCharTokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 5, Tokenization.Truncate.NONE) + ).build() + ) { + + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> tokenizer.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.NONE) + ); + assertThat(ex.getMessage(), equalTo("Input too large. The tokenized input length [8] exceeds the maximum sequence length [5]")); + + // Shouldn't throw + tokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE); + + // Should throw as special chars add two tokens + expectThrows( + ElasticsearchStatusException.class, + () -> specialCharTokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE) + ); + } + } public void testTokenizeLargeInputTruncation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, false, 5, Tokenization.Truncate.FIRST)) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch fun with Pancake and Godzilla", - Tokenization.Truncate.FIRST - ); - assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.getTokenIds()); - - BertTokenizer tokenizerWithSpecialTokens = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, true, 5, Tokenization.Truncate.FIRST) - ).build(); - tokenization = tokenizerWithSpecialTokens.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.FIRST); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, 5, Tokenization.Truncate.FIRST) + ).build() + ) { + + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch fun with Pancake and Godzilla", + Tokenization.Truncate.FIRST + ); + assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.tokenIds()); + } + + try ( + BertTokenizer tokenizerWithSpecialTokens = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 5, Tokenization.Truncate.FIRST) + ).build() + ) { + var tokenization = tokenizerWithSpecialTokens.tokenize( + "Elasticsearch fun with Pancake and Godzilla", + Tokenization.Truncate.FIRST + ); + 
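            // tokenIds() holds one vocab index per generated token, and tokenMap()
            // ties each token back to the input word that produced it, with -1 for
            // special tokens: [CLS] (12) and [SEP] (13) map to -1, the word pieces
            // "Elastic" (0) and "##search" (1) both map to input word 0, and "fun"
            // (3) maps to input word 1, which is exactly what the next two
            // assertions check.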
assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap()); + } } public void testTokenizeAppendSpecialTokens() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + try (BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build()) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap()); + } } public void testNeverSplitTokens() { final String specialToken = "SP001"; - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) - .setNeverSplit(Collections.singleton(specialToken)) - .setWithSpecialTokens(false) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch " + specialToken + " fun", - Tokenization.Truncate.NONE - ); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", specialToken, "fun")); - assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setNeverSplit(Collections.singleton(specialToken)) + .setWithSpecialTokens(false) + .build() + ) { + + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch " + specialToken + " fun", + Tokenization.Truncate.NONE + ); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", specialToken, "fun")); + assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.tokenMap()); + } } public void testDoLowerCase() { - { + try ( BertTokenizer tokenizer = BertTokenizer.builder( Arrays.asList("elastic", "##search", "fun", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN), Tokenization.createDefault() - ).setDoLowerCase(false).setWithSpecialTokens(false).build(); + ).setDoLowerCase(false).setWithSpecialTokens(false).build() + ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 3, 2 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 3, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } - { + try ( BertTokenizer tokenizer = BertTokenizer.builder( 
Arrays.asList("elastic", "##search", "fun", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN), Tokenization.createDefault() - ).setDoLowerCase(true).setWithSpecialTokens(false).build(); + ).setDoLowerCase(true).setWithSpecialTokens(false).build() + ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } } public void testPunctuation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).setWithSpecialTokens(false).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", ",", "fun", ".")); - assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setWithSpecialTokens(false) + .build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", ",", "fun", ".")); + assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.tokenMap()); + + tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap()); + } } public void testPunctuationWithMask() { - BertTokenizer tokenizer = BertTokenizer.builder( - List.of( - "[CLS]", - "This", - "is", - "[MASK]", - "-", - "~", - "ta", - "##stic", - "!", - "[SEP]", - "sub", - ",", - ".", - BertTokenizer.UNKNOWN_TOKEN, - BertTokenizer.PAD_TOKEN - ), - Tokenization.createDefault() - ).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "[MASK]", "-", "tastic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", 
Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "tastic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "[CLS]", + "This", + "is", + "[MASK]", + "-", + "~", + "ta", + "##stic", + "!", + "[SEP]", + "sub", + ",", + ".", + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.PAD_TOKEN + ), + Tokenization.createDefault() + ).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build() + ) { + + TokenizationResult.Tokens tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.tokenMap()); + + tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.tokenMap()); + + tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.tokenMap()); + } } public void testBatchInput() { - BertTokenizer tokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult tr = tokenizer.buildTokenizationResult( - List.of( - tokenizer.tokenize("Elasticsearch", Tokenization.Truncate.NONE), - tokenizer.tokenize("my little red car", Tokenization.Truncate.NONE), - tokenizer.tokenize("Godzilla day", Tokenization.Truncate.NONE), - tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) - ) - ); - assertThat(tr.getTokenizations(), hasSize(4)); - - TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(1); - assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(2); - assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(3); - assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + + TokenizationResult tr = tokenizer.buildTokenizationResult( + 
List.of( + tokenizer.tokenize("Elasticsearch", Tokenization.Truncate.NONE), + tokenizer.tokenize("my little red car", Tokenization.Truncate.NONE), + tokenizer.tokenize("Godzilla day", Tokenization.Truncate.NONE), + tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) + ) + ); + assertThat(tr.getTokens(), hasSize(4)); + + TokenizationResult.Tokens tokenization = tr.getTokenization(0); + assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0 }, tokenization.tokenMap()); + + tokenization = tr.getTokenization(1); + assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.tokenMap()); + + tokenization = tr.getTokenization(2); + assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); + + tokenization = tr.getTokenization(3); + assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap()); + } } public void testMultiSeqTokenization() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) - .setDoLowerCase(false) - .setWithSpecialTokens(true) - .build(); - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.NONE - ); - - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - BertTokenizer.SEPARATOR_TOKEN - ) - ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setDoLowerCase(false) + .setWithSpecialTokens(true) + .build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.NONE + ); + + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + "little", + "red", + "car", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); + } } public void testTokenizeLargeInputMultiSequenceTruncation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 10, Tokenization.Truncate.FIRST)) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.FIRST - ); - - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - BertTokenizer.SEPARATOR_TOKEN - ) - ); - - expectThrows( - ElasticsearchStatusException.class, - () 
-> BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 8, Tokenization.Truncate.NONE)) - .build() - .tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE) - ); - - tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 10, Tokenization.Truncate.SECOND)).build(); - - tokenization = tokenizer.tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.SECOND); - tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - BertTokenizer.SEPARATOR_TOKEN - ) - ); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 10, Tokenization.Truncate.FIRST) + ).build() + ) { + + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.FIRST + ); + + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + "little", + "red", + "car", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + + expectThrows( + ElasticsearchStatusException.class, + () -> BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 8, Tokenization.Truncate.NONE)) + .build() + .tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE) + ); + } + + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 10, Tokenization.Truncate.SECOND) + ).build() + ) { + + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.SECOND + ); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + } } public void testMultiSeqRequiresSpecialTokens() { - BertTokenizer tokenizer = BertTokenizer.builder( - List.of("foo", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN, BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), - Tokenization.createDefault() - ).setDoLowerCase(false).setWithSpecialTokens(false).build(); - expectThrows(Exception.class, () -> tokenizer.tokenize("foo", "foo", Tokenization.Truncate.NONE)); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "foo", + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.PAD_TOKEN, + BertTokenizer.CLASS_TOKEN, + BertTokenizer.SEPARATOR_TOKEN + ), + Tokenization.createDefault() + ).setDoLowerCase(false).setWithSpecialTokens(false).build() + ) { + expectThrows(Exception.class, () -> tokenizer.tokenize("foo", "foo", Tokenization.Truncate.NONE)); + } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java similarity index 66% rename from 
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java index a96d557d36b50..b04cad380af15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -17,15 +18,15 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TokenTrieNodeTests extends ESTestCase { +public class CharSeqTokenTrieNodeTests extends ESTestCase { - public void testEmpty() { - TokenTrieNode root = TokenTrieNode.build(Collections.emptyList(), s -> Arrays.asList(s.split(":"))); + public void testEmpty() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build(Collections.emptyList(), s -> Arrays.asList(s.split(":"))); assertThat(root.isLeaf(), is(true)); } - public void testTokensWithoutDelimiter() { - TokenTrieNode root = TokenTrieNode.build(List.of("a", "b", "c"), s -> Arrays.asList(s.split(":"))); + public void testTokensWithoutDelimiter() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build(List.of("a", "b", "c"), s -> Arrays.asList(s.split(":"))); assertThat(root.isLeaf(), is(false)); assertThat(root.getChild("a").isLeaf(), is(true)); @@ -34,48 +35,51 @@ public void testTokensWithoutDelimiter() { assertThat(root.getChild("d"), is(nullValue())); } - public void testTokensWithDelimiter() { - TokenTrieNode root = TokenTrieNode.build(List.of("aa:bb:cc", "aa:bb:dd", "bb:aa:cc", "bb:bb:cc"), s -> Arrays.asList(s.split(":"))); + public void testTokensWithDelimiter() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build( + List.of("aa:bb:cc", "aa:bb:dd", "bb:aa:cc", "bb:bb:cc"), + s -> Arrays.asList(s.split(":")) + ); assertThat(root.isLeaf(), is(false)); // Let's look at the aa branch first { - TokenTrieNode aaNode = root.getChild("aa"); + CharSeqTokenTrieNode aaNode = root.getChild("aa"); assertThat(aaNode, is(notNullValue())); assertThat(aaNode.isLeaf(), is(false)); assertThat(aaNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbNode = aaNode.getChild("bb"); + CharSeqTokenTrieNode bbNode = aaNode.getChild("bb"); assertThat(bbNode, is(notNullValue())); assertThat(bbNode.isLeaf(), is(false)); assertThat(bbNode.getChild("zz"), is(nullValue())); - TokenTrieNode ccNode = bbNode.getChild("cc"); + CharSeqTokenTrieNode ccNode = bbNode.getChild("cc"); assertThat(ccNode, is(notNullValue())); assertThat(ccNode.isLeaf(), is(true)); assertThat(ccNode.getChild("zz"), is(nullValue())); - TokenTrieNode ddNode = bbNode.getChild("dd"); + CharSeqTokenTrieNode ddNode = bbNode.getChild("dd"); assertThat(ddNode, is(notNullValue())); assertThat(ddNode.isLeaf(), is(true)); assertThat(ddNode.getChild("zz"), is(nullValue())); } // Now the bb branch { - TokenTrieNode bbNode = root.getChild("bb"); + CharSeqTokenTrieNode bbNode = root.getChild("bb"); assertThat(bbNode, is(notNullValue())); assertThat(bbNode.isLeaf(), is(false)); assertThat(bbNode.getChild("zz"), is(nullValue())); - TokenTrieNode aaNode = bbNode.getChild("aa"); + CharSeqTokenTrieNode aaNode = bbNode.getChild("aa"); assertThat(aaNode, 
is(notNullValue())); assertThat(aaNode.isLeaf(), is(false)); assertThat(aaNode.getChild("zz"), is(nullValue())); - TokenTrieNode aaCcNode = aaNode.getChild("cc"); + CharSeqTokenTrieNode aaCcNode = aaNode.getChild("cc"); assertThat(aaCcNode, is(notNullValue())); assertThat(aaCcNode.isLeaf(), is(true)); assertThat(aaCcNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbBbNode = bbNode.getChild("bb"); + CharSeqTokenTrieNode bbBbNode = bbNode.getChild("bb"); assertThat(bbBbNode, is(notNullValue())); assertThat(bbBbNode.isLeaf(), is(false)); assertThat(bbBbNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbCcNode = bbBbNode.getChild("cc"); + CharSeqTokenTrieNode bbCcNode = bbBbNode.getChild("cc"); assertThat(bbCcNode, is(notNullValue())); assertThat(bbCcNode.isLeaf(), is(true)); assertThat(bbCcNode.getChild("zz"), is(nullValue())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java new file mode 100644 index 0000000000000..0735ccea569ba --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.test.ESTestCase; + +import java.io.CharArrayReader; +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class ControlCharFilterTests extends ESTestCase { + + public void testOnlyControlChars() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter( + new CharArrayReader(new char[] { Character.FORMAT, Character.CONTROL, Character.CONTROL, Character.CONTROL }) + ); + char[] output = new char[10]; + assertThat(controlCharFilter.read(output, 0, 5), equalTo(-1)); + } + + public void testEmpty() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter(new CharArrayReader(new char[] {})); + char[] output = new char[10]; + assertThat(controlCharFilter.read(output, 0, 5), equalTo(-1)); + } + + public void testCorrect() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter( + new CharArrayReader( + new char[] { + Character.FORMAT, + Character.FORMAT, + 'a', + Character.FORMAT, + Character.FORMAT, + 'b', + 'b', + Character.CONTROL, + 'c', + 'c', + Character.CONTROL, + Character.CONTROL } + ) + ); + char[] output = new char[10]; + int read = controlCharFilter.read(output, 0, 10); + assertThat(read, equalTo(5)); + assertThat(new String(output, 0, read), equalTo("abbcc")); + } + + public void testCorrectForLongString() throws IOException { + char[] charArray = new char[2000]; + int i = 0; + for (; i < 1000; i++) { + charArray[i] = 'a'; + } + charArray[i++] = Character.CONTROL; + charArray[i++] = Character.CONTROL; + for (int j = 0; j < 997; j++) { + charArray[i++] = 'a'; + } + charArray[i] = Character.CONTROL; + ControlCharFilter controlCharFilter = new ControlCharFilter(new CharArrayReader(charArray)); + char[] output = new char[2000]; + int read = controlCharFilter.read(output); + assertThat(read, equalTo(1997)); + for (int j = 0; j < read; j++) { + assertEquals('a', 
output[j]); + } + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java index 43d292fecd4c5..a2b7b9b364e2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java @@ -42,54 +42,59 @@ public class MPNetTokenizerTests extends ESTestCase { MPNetTokenizer.PAD_TOKEN ); - private List<String> tokenStrings(List<DelimitedToken> tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); + private List<String> tokenStrings(List<? extends DelimitedToken> tokens) { + return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList()); } public void testTokenize() { - BertTokenizer tokenizer = MPNetTokenizer.mpBuilder( - TEST_CASED_VOCAB, - new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = MPNetTokenizer.mpBuilder( + TEST_CASED_VOCAB, + new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); + } } public void testMultiSeqTokenization() { - MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder( - TEST_CASED_VOCAB, - new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) - ).setDoLowerCase(false).setWithSpecialTokens(true).build(); - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.NONE - ); + try ( + MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder( + TEST_CASED_VOCAB, + new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) + ).setDoLowerCase(false).setWithSpecialTokens(true).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.NONE + ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - MPNetTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - MPNetTokenizer.SEPARATOR_TOKEN, - MPNetTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - MPNetTokenizer.SEPARATOR_TOKEN - ) - ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + MPNetTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + MPNetTokenizer.SEPARATOR_TOKEN, + MPNetTokenizer.SEPARATOR_TOKEN, +
"God", + "##zilla", + "my", + "little", + "red", + "car", + MPNetTokenizer.SEPARATOR_TOKEN + ) + ); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); + } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java new file mode 100644 index 0000000000000..471cb55518b66 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class MultiCharSequenceTests extends ESTestCase { + + public void testMultiCharSequence() { + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test")); + assertThat(sequence.length(), equalTo("This is a test".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + } + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test", "another string")); + assertThat(sequence.length(), equalTo("This is a test".length() + "another string".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.charAt(16), equalTo('o')); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + assertThat(sequence.subSequence(14, 18).toString(), equalTo("anot")); + assertThat(sequence.subSequence(14, 28).toString(), equalTo("another string")); + assertThat(sequence.subSequence(13, 18).toString(), equalTo("tanot")); + assertThat(sequence.subSequence(13, 15).toString(), equalTo("ta")); + } + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test", "another string", "almost last")); + assertThat(sequence.length(), equalTo("This is a test".length() + "another string".length() + "almost last".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.charAt(16), equalTo('o')); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + assertThat(sequence.subSequence(14, 18).toString(), equalTo("anot")); + assertThat(sequence.subSequence(14, 28).toString(), equalTo("another string")); + assertThat(sequence.subSequence(13, 18).toString(), equalTo("tanot")); + assertThat(sequence.subSequence(13, 15).toString(), equalTo("ta")); + assertThat(sequence.subSequence(2, 30).toString(), equalTo("is is a testanother stringal")); + assertThat(sequence.subSequence(30, 35).toString(), equalTo("most ")); + } + + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java new file mode 100644 index 0000000000000..c08602771de93 --- /dev/null +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.List; + +public class WordPieceTokenFilterTests extends BaseTokenStreamTestCase { + + public static final String UNKNOWN_TOKEN = "[UNK]"; + + public void testTokenize() throws IOException { + List<String> vocab = List.of(UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing"); + TestNLPAnalyzer analyzer = new TestNLPAnalyzer(vocab, UNKNOWN_TOKEN, 512); + + assertAnalyzesTo(analyzer, "", new String[0]); + assertAnalyzesTo(analyzer, "unwanted", new String[] { "un", "##want", "##ed" }, new int[] { 1, 0, 0 }); + assertAnalyzesTo(analyzer, "running", new String[] { "runn", "##ing" }, new int[] { 1, 0 }); + assertAnalyzesTo(analyzer, "unwantedX", new String[] { "[UNK]" }, new int[] { 1 }); + } + + public void testMaxCharLength() throws IOException { + List<String> vocab = List.of(UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing", "become"); + TestNLPAnalyzer analyzer = new TestNLPAnalyzer(vocab, UNKNOWN_TOKEN, 4); + + assertAnalyzesTo(analyzer, "become", new String[] { UNKNOWN_TOKEN }, new int[] { 1 }); + } + + static class TestNLPAnalyzer extends Analyzer { + private final List<String> dictionary; + private final String unknownToken; + private final int maxTokenSize; + + TestNLPAnalyzer(List<String> dictionary, String unknownToken, int maxTokenSize) { + this.dictionary = dictionary; + this.unknownToken = unknownToken; + this.maxTokenSize = maxTokenSize; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + try { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(512); + WordPieceTokenFilter filter = WordPieceTokenFilter.build( + false, + false, + false, + List.of(), + dictionary, + unknownToken, + maxTokenSize, + tokenizer + ); + return new TokenStreamComponents(tokenizer, filter); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java deleted file mode 100644 index c62df28007eef..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import org.elasticsearch.test.ESTestCase; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.empty; - -public class WordPieceTokenizerTests extends ESTestCase { - - public static final String UNKNOWN_TOKEN = "[UNK]"; - - public void testTokenize() { - String[] vocab = { UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing" }; - Map<String, Integer> vocabMap = createVocabMap(vocab); - - WordPieceTokenizer tokenizer = new WordPieceTokenizer(vocabMap, UNKNOWN_TOKEN, 100); - - var tokenIds = tokenizer.tokenize(new DelimitedToken(0, 0, "")); - assertThat(tokenIds, empty()); - - tokenIds = tokenizer.tokenize(makeToken("unwanted")); - List<String> tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("un", "##want", "##ed")); - - tokenIds = tokenizer.tokenize(makeToken("running")); - tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("runn", "##ing")); - - tokenIds = tokenizer.tokenize(makeToken("unwantedX")); - tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains(UNKNOWN_TOKEN)); - } - - private DelimitedToken makeToken(String str) { - return new DelimitedToken(0, str.length(), str); - } - - public void testMaxCharLength() { - String[] vocab = { "Some", "words", "will", "become", "UNK" }; - Map<String, Integer> vocabMap = createVocabMap(vocab); - - WordPieceTokenizer tokenizer = new WordPieceTokenizer(vocabMap, "UNK", 4); - var tokenIds = tokenizer.tokenize(new DelimitedToken(0, 0, "become")); - List<String> tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("UNK")); - } - - static Map<String, Integer> createVocabMap(String... words) { - Map<String, Integer> vocabMap = new HashMap<>(); - for (int i = 0; i < words.length; i++) { - vocabMap.put(words[i], i); - } - return vocabMap; - } -} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index 4e2ed262bece1..c72a7ddfb9f3b 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -78,7 +78,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields.
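 * <p>
 * (A hedged illustration of why the constant below is bumped, using hypothetical names rather than
 * code from this change: template registries re-install their resources whenever the registry
 * version is raised, so incrementing it forces the updated mappings to be deployed.)
 * <pre>
 * if (installedTemplateVersion(clusterState) &lt; STACK_MONITORING_REGISTRY_VERSION) {
 *     installTemplates(); // hypothetical helper: re-install so mapping changes take effect
 * }
 * </pre>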
*/ - public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id; + public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id + 1; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index d71c9aac6167f..02a0f6b7f0a7f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -598,64 +598,64 @@ private boolean canUseWatcher() { @Override public void onCleanUpIndices(TimeValue retention) { - if (state.get() != State.RUNNING) { + ClusterState clusterState = clusterService.state(); + if (clusterService.localNode() == null + || clusterState == null + || clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) { logger.debug("exporter not ready"); return; } - if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster()) { // Reference date time will be compared to index.creation_date settings, // that's why it must be in UTC ZonedDateTime expiration = ZonedDateTime.now(ZoneOffset.UTC).minus(retention.millis(), ChronoUnit.MILLIS); logger.debug("cleaning indices [expiration={}, retention={}]", expiration, retention); - ClusterState clusterState = clusterService.state(); - if (clusterState != null) { - final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); - final long currentTimeMillis = System.currentTimeMillis(); + final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); + final long currentTimeMillis = System.currentTimeMillis(); - // list of index patterns that we clean up - final String[] indexPatterns = new String[] { ".monitoring-*" }; + // list of index patterns that we clean up + final String[] indexPatterns = new String[] { ".monitoring-*" }; - // Get the names of the current monitoring indices - final Set<String> currents = MonitoredSystem.allSystems() - .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) - .collect(Collectors.toSet()); + // Get the names of the current monitoring indices + final Set<String> currents = MonitoredSystem.allSystems() + .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) + .collect(Collectors.toSet()); - // avoid deleting the current alerts index, but feel free to delete older ones - currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); + // avoid deleting the current alerts index, but feel free to delete older ones + currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); - Set<String> indices = new HashSet<>(); - for (ObjectObjectCursor<String, IndexMetadata> index : clusterState.getMetadata().indices()) { - String indexName = index.key; + Set<String> indices = new HashSet<>(); + for (ObjectObjectCursor<String, IndexMetadata> index : clusterState.getMetadata().indices()) { + String indexName = index.key; - if (Regex.simpleMatch(indexPatterns, indexName)) { - // Never delete any "current" index (e.g., today's index or the most recent version with no 
timestamp, like alerts) - if (currents.contains(indexName)) { - continue; - } + if (Regex.simpleMatch(indexPatterns, indexName)) { + // Never delete any "current" index (e.g., today's index or the most recent version with no timestamp, like alerts) + if (currents.contains(indexName)) { + continue; + } - long creationDate = index.value.getCreationDate(); - if (creationDate <= expirationTimeMillis) { - if (logger.isDebugEnabled()) { - logger.debug( - "detected expired index [name={}, created={}, expired={}]", - indexName, - Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), - expiration - ); - } - indices.add(indexName); + long creationDate = index.value.getCreationDate(); + if (creationDate <= expirationTimeMillis) { + if (logger.isDebugEnabled()) { + logger.debug( + "detected expired index [name={}, created={}, expired={}]", + indexName, + Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), + expiration + ); } + indices.add(indexName); } } + } - if (indices.isEmpty() == false) { - logger.info("cleaning up [{}] old indices", indices.size()); - deleteIndices(indices); - } else { - logger.debug("no old indices found for clean up"); - } + if (indices.isEmpty() == false) { + logger.info("cleaning up [{}] old indices", indices.size()); + deleteIndices(indices); + } else { + logger.debug("no old indices found for clean up"); } } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index a78e7baab75f4..c3fcb7525775e 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporters; -import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.junit.Before; @@ -23,7 +22,6 @@ import java.util.Locale; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; -import static org.hamcrest.Matchers.is; @ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0) public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTestCase { @@ -40,7 +38,6 @@ public void setup() { cleanerService.setGlobalRetention(TimeValue.MAX_VALUE); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78737") public void testNothingToDelete() throws Exception { CleanerService.Listener listener = getListener(); listener.onCleanUpIndices(days(0)); @@ -107,7 +104,6 @@ public void testIgnoreCurrentTimestampedIndex() throws Exception { assertIndicesCount(1); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78862") public void testDeleteIndices() throws Exception { CleanerService.Listener listener = getListener(); @@ -167,10 +163,6 @@ protected CleanerService.Listener getListener() throws Exception { Exporters exporters = internalCluster().getInstance(Exporters.class, internalCluster().getMasterName()); for (Exporter exporter : exporters.getEnabledExporters()) { if (exporter instanceof CleanerService.Listener) { - // Ensure that the exporter is 
initialized. - if (exporter instanceof LocalExporter) { - assertBusy(() -> assertThat(((LocalExporter) exporter).isExporterReady(), is(true))); - } return (CleanerService.Listener) exporter; } } diff --git a/x-pack/plugin/old-lucene-versions/build.gradle b/x-pack/plugin/old-lucene-versions/build.gradle index 22ab9d7bf24ce..e59b68f040f6f 100644 --- a/x-pack/plugin/old-lucene-versions/build.gradle +++ b/x-pack/plugin/old-lucene-versions/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,6 +12,7 @@ archivesBaseName = 'x-pack-old-lucene-versions' dependencies { compileOnly project(path: xpackModule('core')) + internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) } addQaCheckDependencies() diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java new file mode 100644 index 0000000000000..4d2c8113c02ba --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -0,0 +1,224 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.license.DeleteLicenseAction; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensesMetadata; +import org.elasticsearch.license.PostStartBasicAction; +import org.elasticsearch.license.PostStartBasicRequest; +import org.elasticsearch.license.PostStartTrialAction; +import org.elasticsearch.license.PostStartTrialRequest; +import org.elasticsearch.license.PostStartTrialResponse; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.snapshots.SnapshotId; +import 
org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.oneOf; + +@ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class ArchiveLicenseIntegTests extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Arrays.asList(LocalStateOldLuceneVersions.class, TestRepositoryPlugin.class, MockRepository.Plugin.class); + } + + public static class TestRepositoryPlugin extends Plugin implements RepositoryPlugin { + public static final String FAKE_VERSIONS_TYPE = "fakeversionsrepo"; + + @Override + public Map<String, Repository.Factory> getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + FAKE_VERSIONS_TYPE, + metadata -> new FakeVersionsRepo(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + + // fakes an old index version format to activate license checks + private static class FakeVersionsRepo extends FsRepository { + FakeVersionsRepo( + RepositoryMetadata metadata, + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + @Override + public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) + throws IOException { + final IndexMetadata original = super.getSnapshotIndexMetaData(repositoryData, snapshotId, index); + return IndexMetadata.builder(original) + .settings( + Settings.builder() + .put(original.getSettings()) + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + randomBoolean() ? 
Version.fromString("5.0.0") : Version.fromString("6.0.0") + ) + ) + .build(); + } + } + } + + private static final String repoName = "test-repo"; + private static final String indexName = "test-index"; + private static final String snapshotName = "test-snapshot"; + + @Before + public void createAndRestoreArchive() throws Exception { + createRepository( + repoName, + TestRepositoryPlugin.FAKE_VERSIONS_TYPE, + randomRepositorySettings().put(RestoreService.ALLOW_BWC_INDICES_SETTING.getKey(), true) + ); + createIndex(indexName); + createFullSnapshot(repoName, snapshotName); + + assertAcked(client().admin().indices().prepareDelete(indexName)); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + client().execute(PostStartTrialAction.INSTANCE, request).get(); + } + + public void testFeatureUsage() throws Exception { + XPackUsageFeatureResponse usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + ArchiveFeatureSetUsage archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(0, archiveUsage.getNumberOfArchiveIndices()); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(1, archiveUsage.getNumberOfArchiveIndices()); + } + + public void testFailRestoreOnInvalidLicense() throws Exception { + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + ElasticsearchSecurityException e = expectThrows( + ElasticsearchSecurityException.class, + () -> client().admin().cluster().restoreSnapshot(req).actionGet() + ); + assertThat(e.getMessage(), containsString("current license is non-compliant for [archive]")); + } + + // checks that shards are failed if license becomes invalid after successful restore + public void testShardAllocationOnInvalidLicense() throws Exception { + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + // check that shards have been failed as part of invalid license + assertBusy( + () -> assertEquals( + ClusterHealthStatus.RED, + 
client().admin().cluster().prepareHealth(indexName).get().getIndices().get(indexName).getStatus() + ) + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + // add a valid license again + // This is a bit of a hack in tests, as we can't re-add a trial license + // We force this by clearing the existing basic license first + updateClusterState( + currentState -> ClusterState.builder(currentState) + .metadata(Metadata.builder(currentState.metadata()).removeCustom(LicensesMetadata.TYPE).build()) + .build() + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + final PostStartTrialResponse response = client().execute(PostStartTrialAction.INSTANCE, request).get(); + assertThat( + response.getStatus(), + oneOf( + PostStartTrialResponse.Status.UPGRADED_TO_TRIAL, + // The LicenseService automatically generates a license of {@link LicenseService#SELF_GENERATED_LICENSE_TYPE} type + // if there is no license found in the cluster state (see {@link LicenseService#registerOrUpdateSelfGeneratedLicense}). + // Since this test explicitly removes the LicensesMetadata from cluster state it is possible that the self-generated + // license is created before the PostStartTrialRequest is acked. + PostStartTrialResponse.Status.TRIAL_ALREADY_ACTIVATED + ) + ); + // check if cluster goes green again after valid license has been put in place + ensureGreen(indexName); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java new file mode 100644 index 0000000000000..e4a6110be7693 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; + +import java.nio.file.Path; + +public class LocalStateOldLuceneVersions extends LocalStateCompositeXPackPlugin { + + private final OldLuceneVersions plugin; + + public LocalStateOldLuceneVersions(final Settings settings, final Path configPath) { + super(settings, configPath); + this.plugin = new OldLuceneVersions() { + + @Override + protected XPackLicenseState getLicenseState() { + return LocalStateOldLuceneVersions.this.getLicenseState(); + } + + }; + plugins.add(plugin); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java new file mode 100644 index 0000000000000..fdd3cf1f5f8e5 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; + +import java.util.function.BooleanSupplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.isArchiveIndex; + +public class ArchiveAllocationDecider extends AllocationDecider { + + static final String NAME = "archive"; + + private final BooleanSupplier hasValidLicenseSupplier; + + public ArchiveAllocationDecider(BooleanSupplier hasValidLicenseSupplier) { + this.hasValidLicenseSupplier = hasValidLicenseSupplier; + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(IndexMetadata indexMetadata, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(indexMetadata, allocation); + } + + @Override + public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + private Decision allowAllocation(IndexMetadata indexMetadata, RoutingAllocation allocation) { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (hasValidLicenseSupplier.getAsBoolean()) { + return allocation.decision(Decision.YES, NAME, "valid license for archive functionality"); + } else { + return allocation.decision(Decision.NO, NAME, "invalid license for archive functionality"); + } + } else { + return allocation.decision(Decision.YES, NAME, "decider only applicable for indices backed by archive functionality"); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java new file mode 100644 index 0000000000000..702559a4810d8 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveInfoTransportAction extends XPackInfoFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveInfoTransportAction(TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(XPackInfoFeatureAction.ARCHIVE.name(), transportService, actionFilters); + this.licenseState = licenseState; + } + + @Override + public String name() { + return XPackField.ARCHIVE; + } + + @Override + public boolean available() { + return ARCHIVE_FEATURE.checkWithoutTracking(licenseState); + } + + @Override + public boolean enabled() { + return true; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java new file mode 100644 index 0000000000000..1dcd658c1666c --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +final class ArchiveUsageTracker implements Runnable { + + private final XPackLicenseState licenseState; + private final Supplier<ClusterState> clusterStateSupplier; + + ArchiveUsageTracker(XPackLicenseState licenseState, Supplier<ClusterState> clusterStateSupplier) { + this.clusterStateSupplier = clusterStateSupplier; + this.licenseState = licenseState; + } + + @Override + public void run() { + if (hasArchiveIndices(clusterStateSupplier.get())) { + ARCHIVE_FEATURE.check(licenseState); + } + } + + private static boolean hasArchiveIndices(ClusterState state) { + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + return true; + } + } + return false; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java new file mode 100644 index 0000000000000..d209db2f9ce37 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveUsageTransportAction extends XPackUsageFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState + ) { + super( + XPackUsageFeatureAction.ARCHIVE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.licenseState = licenseState; + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener<XPackUsageFeatureResponse> listener + ) { + int numArchiveIndices = 0; + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + numArchiveIndices++; + } + } + listener.onResponse( + new XPackUsageFeatureResponse(new ArchiveFeatureSetUsage(ARCHIVE_FEATURE.checkWithoutTracking(licenseState), numArchiveIndices)) + ); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java new file mode 100644 index 0000000000000..2cf7160518d74 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.RerouteService; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.LicenseStateListener; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { + + private static final Logger logger = LogManager.getLogger(FailShardsOnInvalidLicenseClusterListener.class); + + private final XPackLicenseState xPackLicenseState; + + private final RerouteService rerouteService; + + final Set<IndexShard> shardsToFail = new HashSet<>(); + + private boolean allowed; + + public FailShardsOnInvalidLicenseClusterListener(XPackLicenseState xPackLicenseState, RerouteService rerouteService) { + this.xPackLicenseState = xPackLicenseState; + this.rerouteService = rerouteService; + this.allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + xPackLicenseState.addListener(this); + } + + @Override + public synchronized void afterIndexShardStarted(IndexShard indexShard) { + shardsToFail.add(indexShard); + failActiveShardsIfNecessary(); + } + + @Override + public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + if (indexShard != null) { + shardsToFail.remove(indexShard); + } + } + + @Override + public synchronized void licenseStateChanged() { + final boolean allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + if (allowed && this.allowed == false) { + rerouteService.reroute("reroute after license activation", Priority.NORMAL, new ActionListener<ClusterState>() { + @Override + public void onResponse(ClusterState clusterState) { + logger.trace("successful reroute after license activation"); + } + + @Override + public void onFailure(Exception e) { + logger.debug("unsuccessful reroute after license activation"); + } + }); + } + this.allowed = allowed; + failActiveShardsIfNecessary(); + } + + private void failActiveShardsIfNecessary() { + assert Thread.holdsLock(this); + if (allowed == false) { + for (IndexShard indexShard : shardsToFail) { + try { + indexShard.failShard("invalid license", null); + } catch (AlreadyClosedException ignored) { + // ignore + } catch (Exception e) { + logger.warn(new ParameterizedMessage("Could not close shard {} due to invalid license", indexShard.shardId()), e); + } + } + shardsToFail.clear(); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 40b021f9ea9d8..69ac9777960de 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -10,37 +10,139 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; -public class OldLuceneVersions extends Plugin implements IndexStorePlugin { +public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin, ActionPlugin { + + public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary( + null, + "archive", + License.OperationMode.ENTERPRISE + ); + + public static boolean isArchiveIndex(Version version) { + return version.before(Version.CURRENT.minimumIndexCompatibilityVersion()); + } + + private final SetOnce<FailShardsOnInvalidLicenseClusterListener> failShardsListener = new SetOnce<>(); + + @Override + public Collection<Object> createComponents( + final Client client, + final ClusterService clusterService, + final ThreadPool threadPool, + final ResourceWatcherService resourceWatcherService, + final ScriptService scriptService, + final NamedXContentRegistry xContentRegistry, + final Environment environment, + final 
NodeEnvironment nodeEnvironment, + final NamedWriteableRegistry registry, + final IndexNameExpressionResolver resolver, + final Supplier<RepositoriesService> repositoriesServiceSupplier + ) { + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); + if (DiscoveryNode.isMasterNode(environment.settings())) { + // We periodically look through the indices and identify if there are any archive indices, + // then mark the feature as used. We do this on each master node so that if one master fails, the + // others continue reporting usage state. + var usageTracker = new ArchiveUsageTracker(getLicenseState(), clusterService::state); + threadPool.scheduleWithFixedDelay(usageTracker, TimeValue.timeValueMinutes(15), ThreadPool.Names.GENERIC); + } + return List.of(); + } + + @Override + public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { + return List.of( + new ActionPlugin.ActionHandler<>(XPackUsageFeatureAction.ARCHIVE, ArchiveUsageTransportAction.class), + new ActionPlugin.ActionHandler<>(XPackInfoFeatureAction.ARCHIVE, ArchiveInfoTransportAction.class) + ); + } + + // overridable by tests + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } + + @Override + public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { + return List.of(new ArchiveAllocationDecider(() -> ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()))); + } @Override public void onIndexModule(IndexModule indexModule) { - if (indexModule.indexSettings().getIndexVersionCreated().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (isArchiveIndex(indexModule.indexSettings().getIndexVersionCreated())) { indexModule.addIndexEventListener(new IndexEventListener() { @Override public void afterFilesRestoredFromRepository(IndexShard indexShard) { convertToNewFormat(indexShard); } }); + + indexModule.addIndexEventListener(failShardsListener.get()); } } + @Override + public Consumer<IndexMetadata> addPreRestoreCheck() { + return indexMetadata -> { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()) == false) { + throw LicenseUtils.newComplianceException("archive"); + } + } + }; + } + /** * The trick used to allow newer Lucene versions to read older Lucene indices is to convert the old directory to a directory that new * Lucene versions happily operate on. 
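 * <p>
 * (A hedged sketch of that conversion idea, with invented helper names rather than the methods of
 * this class: read the legacy commit, rebuild it against codecs the current Lucene version knows,
 * and write it back in place.)
 * <pre>
 * OldSegmentInfos legacy = readOldCommit(directory);      // hypothetical: parse the old-format commit
 * SegmentInfos converted = rewriteWithBwcCodecs(legacy);  // hypothetical: re-express via BWC codecs
 * writeCommit(directory, converted);                      // hypothetical: newer Lucene can now open it
 * </pre>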
The way newer Lucene versions happily comply with reading older data is to put in place a diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java index 4120c15f5cec7..d1607a30dabe9 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java @@ -108,7 +108,10 @@ public TaskId getParentTask() { @Override public Task createTask(long id, String type, String actionName, TaskId parentTaskId, Map<String, String> headers) { - Map<String, String> originHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map<String, String> originHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); return operation.createTask( request, id, diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java index cbdf72b1b9cc9..ee1056977e906 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.license.XPackLicenseState; @@ -75,7 +76,10 @@ public void setUpMocks() throws Exception { Settings.EMPTY ); ClusterApplierService clusterApplierService = mock(ClusterApplierService.class); - when(clusterApplierService.threadPool()).thenReturn(mock(ThreadPool.class)); + final var threadContext = new ThreadContext(Settings.EMPTY); + final var threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + when(clusterApplierService.threadPool()).thenReturn(threadPool); ClusterService clusterService = mock(ClusterService.class); when(clusterService.getClusterApplierService()).thenReturn(clusterApplierService); this.encryptedRepository = new EncryptedRepository( diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index 6f17a0480e976..01c981a23b8ac 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -24,5 +24,7 @@ dependencies { addQaCheckDependencies() tasks.named("internalClusterTest").configure { - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } diff --git a/x-pack/plugin/rollup/qa/rest/build.gradle b/x-pack/plugin/rollup/qa/rest/build.gradle index 4d5c23feb9d20..90fee3972dd4d 100644 --- a/x-pack/plugin/rollup/qa/rest/build.gradle +++ b/x-pack/plugin/rollup/qa/rest/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -22,8 +24,8 @@ restResources { testClusters.configureEach { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'basic' - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' setting 'xpack.security.enabled', 'false' + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml b/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml index 8fa16a7fe70bc..f81dabc767683 100644 --- a/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml +++ b/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml @@ -23,25 +23,25 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - timestamp: "2020-01-01T05:10:00Z" color: "blue" price: 10 - index: _index: docs - _id: 2 + _id: "2" - timestamp: "2020-01-01T05:30:00Z" color: "blue" price: 20 - index: _index: docs - _id: 3 + _id: "3" - timestamp: "2020-01-01T06:10:00Z" color: "red" price: 30 - index: _index: docs - _id: 4 + _id: "4" - timestamp: "2020-01-01T06:30:00Z" color: "green" price: 40 diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index b9de612646088..a26d13450b705 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -90,6 +90,7 @@ public TransportPutRollupJobAction( ); this.persistentTasksService = persistentTasksService; this.client = client; + } @Override @@ -138,9 +139,12 @@ static void checkForDeprecatedTZ(PutRollupJobAction.Request request) { } } - private static RollupJob createRollupJob(RollupJobConfig config, ThreadPool threadPool) { + private RollupJob createRollupJob(RollupJobConfig config, ThreadPool threadPool) { // ensure we only filter for the allowed headers - Map<String, String> filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map<String, String> filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); return new RollupJob(config, filteredHeaders); } diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml index fd0f931d84f0d..907ee4c975877 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: doc - index: _index: docs - _id: 2 + _id: "2" - field: doc - index: _index: docs - _id: 3 + _id: "3" - field: other - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml 
b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml index 648c6cc9f467f..3ca2f7238beec 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml @@ -17,15 +17,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml index d2172e49031bc..de7ec7619df62 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml index e620301dc3fae..8c43c0682c33b 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml index 2d595e8983711..809a45bf938b1 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml @@ -17,15 +17,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml index 9016644790490..11e5e052b871c 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java index 06ceb9623909b..f29aff38bc253 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java @@ -80,6 +80,7 @@ public class FrozenSearchableSnapshotsIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/84158") public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { final String fsRepoName = randomAlphaOfLength(10); final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index ec3948706c1be..5f735e4c63e05 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -353,7 +353,7 @@ private static List createWriters(NodeEnvironment nodeEnvironm } /** - * Creates a new {@link CacheIndexWriter} for the specified data path. The is a single instance per data path. + * Creates a new {@link CacheIndexWriter} for the specified data path. There is a single instance per data path. 
* * @param nodePath the data path * @return a new {@link CacheIndexWriter} instance diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index 29a05f4607f98..41810789e0646 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -24,7 +24,6 @@ import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.StreamSupport; /** * This class upgrades frozen indices to apply the index.shard_limit.group=frozen setting after all nodes have been upgraded to 7.13+ @@ -91,7 +90,8 @@ public void onFailure(Exception e) { } static boolean needsUpgrade(ClusterState state) { - return StreamSupport.stream(state.metadata().spliterator(), false) + return state.metadata() + .stream() .filter( imd -> imd.getCompatibilityVersion().onOrAfter(Version.V_7_12_0) && imd.getCompatibilityVersion().before(Version.V_8_0_0) ) @@ -105,7 +105,8 @@ static ClusterState upgradeIndices(ClusterState currentState) { return currentState; } Metadata.Builder builder = Metadata.builder(currentState.metadata()); - StreamSupport.stream(currentState.metadata().spliterator(), false) + currentState.metadata() + .stream() .filter( imd -> imd.getCompatibilityVersion().onOrAfter(Version.V_7_12_0) && imd.getCompatibilityVersion().before(Version.V_8_0_0) ) diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 6571f46cb1fd3..5430d039c3eb8 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -19,8 +19,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import java.util.stream.StreamSupport; - import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -71,7 +69,7 @@ public void testUpgradeIndices() { assertThat(upgradedState, not(sameInstance(originalState))); assertThat(upgradedState.metadata().indices().size(), equalTo(originalState.metadata().indices().size())); - assertTrue(StreamSupport.stream(upgradedState.metadata().spliterator(), false).anyMatch(upgraded -> { + assertTrue(upgradedState.metadata().stream().anyMatch(upgraded -> { IndexMetadata original = originalState.metadata().index(upgraded.getIndex()); assertThat(original, notNullValue()); if (upgraded.isPartialSearchableSnapshot() == false diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle index bd401da91c3d1..5772db6d11efb 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle +++ b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.Version + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' @@ -36,10 +38,9 @@ testClusters.configureEach { setting 'xpack.security.operator_privileges.enabled', "true" setting 'path.repo', repoDir.absolutePath - systemProperty 'es.user_profile_feature_flag_enabled', 'true' + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") user username: "test_admin", password: 'x-pack-test-password', role: "superuser" user username: "test_operator", password: 'x-pack-test-password', role: "limited_operator" - - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1bcfdc99fea29..d535c395baf09 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -95,6 +95,8 @@ public class Constants { "cluster:admin/transform/update", "cluster:admin/transform/upgrade", "cluster:admin/transform/validate", + "indices:internal/transform/checkpoint", + "indices:internal/transform/checkpoint[n]", // "cluster:admin/voting_config/add_exclusions", // "cluster:admin/voting_config/clear_exclusions", "cluster:admin/xpack/ccr/auto_follow_pattern/activate", @@ -197,6 +199,7 @@ public class Constants { "cluster:admin/xpack/security/profile/activate", "cluster:admin/xpack/security/profile/get", "cluster:admin/xpack/security/profile/put/data", + "cluster:admin/xpack/security/profile/search", "cluster:admin/xpack/security/realm/cache/clear", "cluster:admin/xpack/security/role/delete", "cluster:admin/xpack/security/role/get", @@ -242,6 +245,8 @@ public class Constants { "cluster:internal/xpack/ml/model_allocation/delete", "cluster:internal/xpack/ml/model_allocation/update", "cluster:internal/xpack/ml/reset_mode", + "cluster:internal/xpack/ml/trained_models/cache/info", + "cluster:internal/xpack/ml/trained_models/deployments/stats/get", "cluster:internal/xpack/transform/reset_mode", "cluster:monitor/allocation/explain", "cluster:monitor/async_search/status", @@ -273,6 +278,7 @@ public class Constants { "cluster:monitor/xpack/info", "cluster:monitor/xpack/info/aggregate_metric", "cluster:monitor/xpack/info/analytics", + "cluster:monitor/xpack/info/archive", "cluster:monitor/xpack/info/ccr", "cluster:monitor/xpack/info/data_streams", "cluster:monitor/xpack/info/data_tiers", @@ -313,8 +319,8 @@ public class Constants { "cluster:monitor/xpack/ml/job/results/overall_buckets/get", "cluster:monitor/xpack/ml/job/results/records/get", "cluster:monitor/xpack/ml/job/stats/get", + "cluster:monitor/xpack/ml/memory/stats/get", "cluster:monitor/xpack/ml/trained_models/deployment/infer", - "cluster:internal/xpack/ml/trained_models/deployments/stats/get", "cluster:monitor/xpack/repositories_metering/clear_metering_archive", "cluster:monitor/xpack/repositories_metering/get_metrics", 
"cluster:monitor/xpack/rollup/get", @@ -328,6 +334,7 @@ public class Constants { "cluster:monitor/xpack/usage", "cluster:monitor/xpack/usage/aggregate_metric", "cluster:monitor/xpack/usage/analytics", + "cluster:monitor/xpack/usage/archive", "cluster:monitor/xpack/usage/ccr", "cluster:monitor/xpack/usage/data_streams", "cluster:monitor/xpack/usage/data_tiers", diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java index 8dd65407ce81d..76b416bc56c42 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java @@ -7,22 +7,13 @@ package org.elasticsearch.xpack.security.operator.actions; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.lang.reflect.Field; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -37,23 +28,9 @@ public String getName() { return "test_get_actions"; } - @SuppressForbidden(reason = "Use reflection for testing only") - @SuppressWarnings({ "rawtypes", "unchecked" }) @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - final Map actions = AccessController.doPrivileged( - (PrivilegedAction>) () -> { - try { - final Field actionsField = client.getClass().getDeclaredField("actions"); - actionsField.setAccessible(true); - return (Map) actionsField.get(client); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new ElasticsearchException(e); - } - } - ); - - final List actionNames = actions.keySet().stream().map(ActionType::name).collect(Collectors.toList()); + final List actionNames = client.getActionNames(); return channel -> new RestToXContentListener<>(channel).onResponse( (builder, params) -> builder.startObject().field("actions", actionNames).endObject() ); diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle index c9105fe7394a4..677c286766972 100644 --- a/x-pack/plugin/security/qa/profile/build.gradle +++ b/x-pack/plugin/security/qa/profile/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.Version + apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { @@ -19,7 +21,7 @@ testClusters.matching { it.name == 'javaRestTest' }.configureEach { setting 'xpack.security.authc.token.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' - systemProperty 'es.user_profile_feature_flag_enabled', 'true' + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") user username: "test_admin", password: 'x-pack-test-password' user 
username: "rac_user", password: 'x-pack-test-password', role: "rac_role" diff --git a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java index 9cb692ceecafc..9fd40792a2190 100644 --- a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java +++ b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java @@ -24,6 +24,8 @@ import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; public class ProfileIT extends ESRestTestCase { @@ -99,6 +101,8 @@ public void testGetProfile() throws IOException { assertOK(adminClient().performRequest(indexRequest)); final Map<String, Object> profileMap1 = doGetProfile(uid); + assertThat(castToMap(profileMap1.get("user")).get("realm_name"), equalTo("realm_name_1")); + assertThat(castToMap(profileMap1.get("user")).get("realm_domain"), equalTo("domainA")); assertThat(castToMap(profileMap1.get("data")), anEmptyMap()); // Retrieve application data along the profile @@ -135,6 +139,27 @@ public void testUpdateProfileData() throws IOException { assertThat(castToMap(profileMap1.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); } + public void testSearchProfile() throws IOException { + final Map<String, Object> activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + final Request searchProfilesRequest1 = new Request(randomFrom("GET", "POST"), "_security/profile/_search"); + searchProfilesRequest1.setJsonEntity(""" + { + "name": "rac", + "size": 10 + }"""); + final Response searchProfilesResponse1 = adminClient().performRequest(searchProfilesRequest1); + assertOK(searchProfilesResponse1); + final Map<String, Object> searchProfileResponseMap1 = responseAsMap(searchProfilesResponse1); + assertThat(searchProfileResponseMap1, hasKey("took")); + assertThat(searchProfileResponseMap1.get("total"), equalTo(Map.of("value", 1, "relation", "eq"))); + @SuppressWarnings("unchecked") + final List<Map<String, Object>> users = (List<Map<String, Object>>) searchProfileResponseMap1.get("users"); + assertThat(users, hasSize(1)); + assertThat(users.get(0), hasKey("_score")); + assertThat(users.get(0).get("uid"), equalTo(uid)); + } + private Map<String, Object> doActivateProfile() throws IOException { final Request activateProfileRequest = new Request("POST", "_security/profile/_activate"); activateProfileRequest.setJsonEntity(""" diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 710edf9fe8a95..4ea421c5fc454 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -12,21 +12,19 @@ import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.CreateTokenResponse; import org.elasticsearch.client.security.DeleteRoleRequest; -import
org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetApiKeyResponse; import org.elasticsearch.client.security.InvalidateApiKeyRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.client.security.support.ApiKey; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.security.user.User; import org.hamcrest.Matchers; import java.io.IOException; @@ -36,6 +34,7 @@ @SuppressWarnings("removal") public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase { private RestHighLevelClient highLevelAdminClient; + private TestSecurityClient securityClient; @Override protected Settings restAdminSettings() { @@ -49,13 +48,15 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = new TestSecurityClient(adminClient()); + } + return securityClient; + } + protected void createUser(String username, SecureString password, List roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .putUser( - PutUserRequest.withPassword(new User(username, roles), password.getChars(), true, RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); + getSecurityClient().putUser(new User(username, roles.toArray(String[]::new)), password); } protected void createRole(String name, Collection clusterPrivileges) throws IOException { @@ -75,8 +76,7 @@ protected Tuple createOAuthToken(String username, SecureString p } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security().deleteUser(new DeleteUserRequest(username), RequestOptions.DEFAULT); + getSecurityClient().deleteUser(username); } protected void deleteRole(String name) throws IOException { diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index b08952b78e40f..36bf585bdb404 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -144,6 +144,42 @@ public class ServiceAccountIT extends ESRestTestCase { } }"""; + private static final String ELASTIC_ENTERPRISE_SEARCH_SERVER_ROLE_DESCRIPTOR = """ + { + "cluster": [ + "manage", + "manage_security" + ], + "indices": [ + { + "names": [ + ".ent-search-*", + ".monitoring-ent-search-*", + "metricbeat-ent-search-*", + "enterprise-search-*", + "logs-app_search.analytics-default", + 
"logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + "logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ], + "privileges": [ + "manage", + "read", + "write" + ], + "allow_restricted_indices": false + } + ], + "applications": [], + "run_as": [], + "metadata": {}, + "transient_metadata": { + "enabled": true + } + }"""; + @BeforeClass public static void init() throws URISyntaxException, FileNotFoundException { URL resource = ServiceAccountIT.class.getResource("/ssl/ca.crt"); @@ -199,6 +235,19 @@ public void testGetServiceAccount() throws IOException { ) ); + final Request getServiceAccountRequestEnterpriseSearchService = new Request( + "GET", + "_security/service/elastic/enterprise-search-server" + ); + final Response getServiceAccountResponseEnterpriseSearchService = client().performRequest( + getServiceAccountRequestEnterpriseSearchService + ); + assertServiceAccountRoleDescriptor( + getServiceAccountResponseEnterpriseSearchService, + "elastic/enterprise-search-server", + ELASTIC_ENTERPRISE_SEARCH_SERVER_ROLE_DESCRIPTOR + ); + final String requestPath = "_security/service/" + randomFrom("foo", "elastic/foo", "foo/bar"); final Request getServiceAccountRequest4 = new Request("GET", requestPath); final Response getServiceAccountResponse4 = client().performRequest(getServiceAccountRequest4); diff --git a/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java b/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java index bdccd10db561c..4562418c66533 100644 --- a/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java +++ b/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java @@ -11,21 +11,19 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication.AuthenticationType; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.BeforeClass; import java.io.FileNotFoundException; @@ -48,6 +46,7 @@ public abstract class SecurityRealmSmokeTestCase extends ESRestTestCase { private static Path httpCAPath; private RestHighLevelClient highLevelAdminClient; + private TestSecurityClient securityClient; @BeforeClass public static void findHttpCertificateAuthority() throws Exception 
{ @@ -111,18 +110,11 @@ protected void assertNoApiKeyInfo(Map authenticateResponse, Auth } protected void createUser(String username, SecureString password, List roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .putUser( - PutUserRequest.withPassword(new User(username, roles), password.getChars(), true, RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); + getSecurityClient().putUser(new User(username, roles.toArray(String[]::new)), password); } protected void changePassword(String username, SecureString password) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .changePassword(new ChangePasswordRequest(username, password.getChars(), RefreshPolicy.WAIT_UNTIL), RequestOptions.DEFAULT); + getSecurityClient().changePassword(username, password); } protected void createRole(String name, Collection clusterPrivileges) throws IOException { @@ -132,8 +124,7 @@ protected void createRole(String name, Collection clusterPrivileges) thr } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security().deleteUser(new DeleteUserRequest(username), RequestOptions.DEFAULT); + getSecurityClient().deleteUser(username); } protected void deleteRole(String name) throws IOException { @@ -148,4 +139,11 @@ private RestHighLevelClient getHighLevelAdminClient() { } return highLevelAdminClient; } + + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = new TestSecurityClient(adminClient()); + } + return securityClient; + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index 717173154d496..7e5d4f3bddaff 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -21,25 +21,22 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.After; import org.junit.Before; import java.util.Collections; -import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -58,16 +55,10 @@ public class MultipleIndicesPermissionsTests extends SecurityIntegTestCase { @Before public void waitForSecurityIndexWritable() throws Exception { // adds a dummy user to the native realm to force .security index creation - new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword( - new User("dummy_user", List.of("missing_role")), - "password".toCharArray(), - true, - RefreshPolicy.IMMEDIATE - ), - SECURITY_REQUEST_OPTIONS - ); + new TestSecurityClient(getRestClient(), SecuritySettingsSource.SECURITY_REQUEST_OPTIONS).putUser( + new User("dummy_user", "missing_role"), + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING + ); assertSecurityIndexActive(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 2ba3035e4993e..5e1c432750333 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.authc; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; @@ -24,10 +23,9 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.security.AuthenticateResponse; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -37,6 +35,8 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; +import org.elasticsearch.test.TestSecurityClient; +import org.elasticsearch.test.rest.yaml.ObjectPath; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; @@ -55,7 +55,9 @@ import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.junit.After; import org.junit.Before; @@ -70,6 +72,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import 
java.util.concurrent.CountDownLatch; @@ -206,20 +209,18 @@ public void testCreateApiKey() throws Exception { assertThat(simple.getId(), not(containsString(new String(simple.getKey().getChars())))); assertNull(simple.getExpiration()); - // use the first ApiKey for authorized action - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((response.getId() + ":" + response.getKey().toString()).getBytes(StandardCharsets.UTF_8)); // Assert that we can authenticate with the API KEY - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - AuthenticateResponse authResponse = restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()); - assertThat(authResponse.getUser().getUsername(), equalTo(ES_TEST_ROOT_USER)); - assertThat(authResponse.getAuthenticationType(), equalTo("api_key")); + final Map authResponse = authenticateWithApiKey(response.getId(), response.getKey()); + assertThat(authResponse.get(User.Fields.USERNAME.getPreferredName()), equalTo(ES_TEST_ROOT_USER)); // use the first ApiKey for an unauthorized action + final Map authorizationHeaders = Collections.singletonMap( + "Authorization", + "ApiKey " + getBase64EncodedApiKeyValue(response.getId(), response.getKey()) + ); ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)) + () -> client().filterWithHeader(authorizationHeaders) .admin() .cluster() .prepareUpdateSettings() @@ -373,15 +374,12 @@ public void testInvalidateApiKeyWillClearApiKeyCache() throws IOException, Execu } // Authentication with the first key should fail - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((apiKey1.v1() + ":" + apiKey1.v2()).getBytes(StandardCharsets.UTF_8)); - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> new TestRestHighLevelClient().security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()) + ResponseException e = expectThrows( + ResponseException.class, + () -> authenticateWithApiKey(apiKey1.v1(), new SecureString(apiKey1.v2().toCharArray())) ); assertThat(e.getMessage(), containsString("security_exception")); - assertThat(e.status(), is(RestStatus.UNAUTHORIZED)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(RestStatus.UNAUTHORIZED.getStatus())); } private void verifyInvalidateResponse( @@ -1400,14 +1398,33 @@ private Tuple createApiKeyAndAuthenticateWithIt() throws IOExcep .setMetadata(ApiKeyTests.randomMetadata()) .get(); final String docId = createApiKeyResponse.getId(); - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((docId + ":" + createApiKeyResponse.getKey().toString()).getBytes(StandardCharsets.UTF_8)); - AuthenticateResponse authResponse = new TestRestHighLevelClient().security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()); - assertEquals("api_key", authResponse.getAuthenticationType()); + authenticateWithApiKey(docId, createApiKeyResponse.getKey()); return Tuple.tuple(docId, createApiKeyResponse.getKey().toString()); } + private Map authenticateWithApiKey(String id, SecureString key) throws IOException { + final RequestOptions requestOptions = RequestOptions.DEFAULT.toBuilder() + .addHeader("Authorization", "ApiKey " 
+ getBase64EncodedApiKeyValue(id, key)) + .build(); + final TestSecurityClient securityClient = getSecurityClient(requestOptions); + final Map response = securityClient.authenticate(); + + final String authenticationTypeString = String.valueOf(response.get(User.Fields.AUTHENTICATION_TYPE.getPreferredName())); + final Authentication.AuthenticationType authenticationType = Authentication.AuthenticationType.valueOf( + authenticationTypeString.toUpperCase(Locale.ROOT) + ); + assertThat(authenticationType, is(Authentication.AuthenticationType.API_KEY)); + + assertThat(ObjectPath.evaluate(response, "api_key.id"), is(id)); + + return response; + } + + private String getBase64EncodedApiKeyValue(String id, SecureString key) { + final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString((id + ":" + key).getBytes(StandardCharsets.UTF_8)); + return base64ApiKeyKeyValue; + } + private void assertApiKeyNotCreated(Client client, String keyName) throws ExecutionException, InterruptedException { new RefreshRequestBuilder(client, RefreshAction.INSTANCE).setIndices(SECURITY_MAIN_ALIAS).execute().get(); assertEquals( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 5a6c01a0a99b7..97bec81cf5ef7 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -19,8 +19,8 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.AuthenticateResponse; import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.CreateTokenResponse; import org.elasticsearch.client.security.InvalidateTokenRequest; @@ -34,10 +34,12 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.TokenMetadata; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.After; import org.junit.Before; @@ -58,6 +60,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; @SuppressWarnings("removal") @@ -749,10 +752,10 @@ public void testCreateThenRefreshAsDifferentUser() throws IOException { assertNotEquals(refreshResponse.getAccessToken(), createTokenResponse.getAccessToken()); assertNotEquals(refreshResponse.getRefreshToken(), createTokenResponse.getRefreshToken()); - AuthenticateResponse response = restClient.security().authenticate(superuserOptions); + final Map authenticateResponse = getSecurityClient(superuserOptions).authenticate(); - 
assertEquals(SecuritySettingsSource.ES_TEST_ROOT_USER, response.getUser().getUsername()); - assertEquals("realm", response.getAuthenticationType()); + assertThat(authenticateResponse, hasEntry(User.Fields.USERNAME.getPreferredName(), SecuritySettingsSource.ES_TEST_ROOT_USER)); + assertThat(authenticateResponse, hasEntry(User.Fields.AUTHENTICATION_TYPE.getPreferredName(), "realm")); assertAuthenticateWithToken(createTokenResponse.getAccessToken(), SecuritySettingsSource.TEST_USER_NAME); assertAuthenticateWithToken(refreshResponse.getAccessToken(), SecuritySettingsSource.TEST_USER_NAME); @@ -838,31 +841,28 @@ private String generateInvalidShortAccessToken(Version version) throws Exception } private void assertAuthenticateWithToken(String accessToken, String expectedUser) throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - AuthenticateResponse authResponse = restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); - assertThat(authResponse.getUser().getUsername(), equalTo(expectedUser)); - assertThat(authResponse.getAuthenticationType(), equalTo("token")); + final TestSecurityClient securityClient = getSecurityClient(accessToken); + final Map authResponse = securityClient.authenticate(); + assertThat(authResponse, hasEntry(User.Fields.USERNAME.getPreferredName(), expectedUser)); + assertThat(authResponse, hasEntry(User.Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); } private void assertUnauthorizedToken(String accessToken) { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()) - ); - assertThat(e.status(), equalTo(RestStatus.UNAUTHORIZED)); + final TestSecurityClient securityClient = getSecurityClient(accessToken); + ResponseException e = expectThrows(ResponseException.class, securityClient::authenticate); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.UNAUTHORIZED.getStatus())); + } + + private TestSecurityClient getSecurityClient(String accessToken) { + return getSecurityClient(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); } private RestStatus getAuthenticationResponseCode(String accessToken) throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); try { - restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); + getSecurityClient(accessToken).authenticate(); return RestStatus.OK; - } catch (ElasticsearchStatusException esse) { - return esse.status(); + } catch (ResponseException esse) { + return RestStatus.fromCode(esse.getResponse().getStatusLine().getStatusCode()); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 7b503538b24f4..f73153813877e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -9,10 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.NativeRealmIntegTestCase; @@ -31,7 +27,6 @@ import java.util.List; import static java.util.Collections.singletonMap; -import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -87,7 +82,6 @@ public void testAuthenticate() { * the reserved realm. */ public void testAuthenticateAfterEnablingUser() throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); final List usernames = Arrays.asList( ElasticUser.NAME, KibanaUser.NAME, @@ -98,7 +92,8 @@ public void testAuthenticateAfterEnablingUser() throws IOException { RemoteMonitoringUser.NAME ); for (String username : usernames) { - restClient.security().enableUser(new EnableUserRequest(username, RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); + getSecurityClient().setUserEnabled(username, true); + ClusterHealthResponse response = client().filterWithHeader( singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())) ).admin().cluster().prepareHealth().get(); @@ -126,13 +121,7 @@ public void testChangingPassword() throws IOException { assertThat(response.getClusterName(), is(cluster().getClusterName())); } - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - final boolean changed = restClient.security() - .changePassword( - new ChangePasswordRequest(username, Arrays.copyOf(newPassword, newPassword.length), RefreshPolicy.IMMEDIATE), - SECURITY_REQUEST_OPTIONS - ); - assertTrue(changed); + getSecurityClient().changePassword(username, new SecureString(Arrays.copyOf(newPassword, newPassword.length))); ElasticsearchSecurityException elasticsearchSecurityException = expectThrows( ElasticsearchSecurityException.class, @@ -159,9 +148,7 @@ public void testDisablingUser() throws Exception { assertThat(response.getClusterName(), is(cluster().getClusterName())); // disable user - final boolean disabled = restClient.security() - .disableUser(new DisableUserRequest(ElasticUser.NAME, RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); - assertTrue(disabled); + getSecurityClient().setUserEnabled(ElasticUser.NAME, false); ElasticsearchSecurityException elasticsearchSecurityException = expectThrows( ElasticsearchSecurityException.class, () -> client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) @@ -173,9 +160,7 @@ public void testDisablingUser() throws Exception { assertThat(elasticsearchSecurityException.getMessage(), containsString("authenticate")); // enable - final boolean enabled = restClient.security() - .enableUser(new EnableUserRequest(ElasticUser.NAME, 
RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); - assertTrue(enabled); + getSecurityClient().setUserEnabled(ElasticUser.NAME, true); response = client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) .admin() .cluster() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java index f0749679888e5..0a94a43b92895 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java @@ -9,10 +9,9 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.security.AuthenticateResponse; -import org.elasticsearch.client.security.AuthenticateResponse.RealmInfo; import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest; import org.elasticsearch.client.security.DelegatePkiAuthenticationResponse; import org.elasticsearch.client.security.DeleteRoleMappingRequest; @@ -21,15 +20,20 @@ import org.elasticsearch.client.security.PutRoleMappingRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.user.User.Fields; import org.junit.Before; import java.io.InputStream; @@ -39,10 +43,14 @@ import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyCollectionOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; @@ -179,15 +187,16 @@ public void testDelegateThenAuthenticate() throws Exception { // authenticate optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", "Bearer " + token); - AuthenticateResponse resp = 
restClient.security().authenticate(optionsBuilder.build()); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), is("Elasticsearch Test Client")); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + + final TestSecurityClient securityClient = getSecurityClient(optionsBuilder.build()); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); } } } @@ -220,23 +229,25 @@ public void testTokenInvalidate() throws Exception { // authenticate optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", "Bearer " + token); - AuthenticateResponse resp = restClient.security().authenticate(optionsBuilder.build()); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), is("Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_dn"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(delegateeUsername)); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file")); + final TestSecurityClient securityClient = getSecurityClient(optionsBuilder.build()); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + final Map metadata = assertMap(authenticateResponse, Fields.METADATA); + assertThat(metadata, hasEntry("pki_dn", "O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); + assertThat(metadata, hasEntry("pki_delegated_by_user", delegateeUsername)); + assertThat(metadata, hasEntry("pki_delegated_by_realm", "file")); + // no roles because no role mappings - assertThat(user.getRoles(), is(emptyCollectionOf(String.class))); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + List roles = assertList(authenticateResponse, Fields.ROLES); + assertThat(roles, empty()); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); + // invalidate InvalidateTokenRequest invalidateRequest = InvalidateTokenRequest.accessToken(token); optionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -248,12 +259,19 @@ 
public void testTokenInvalidate() throws Exception { assertThat(invalidateResponse.getInvalidatedTokens(), is(1)); assertThat(invalidateResponse.getErrorsCount(), is(0)); // failed authenticate - ElasticsearchStatusException e1 = expectThrows( - ElasticsearchStatusException.class, - () -> restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + token).build()) + ResponseException ex = expectThrows( + ResponseException.class, + () -> new TestSecurityClient( + getRestClient(), + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + token).build() + ).authenticate() ); - assertThat(e1.getMessage(), is("Elasticsearch exception [type=security_exception, reason=token expired]")); + + assertThat(ex.getResponse().getStatusLine().getStatusCode(), is(RestStatus.UNAUTHORIZED.getStatus())); + + final Map response = ESRestTestCase.entityAsMap(ex.getResponse()); + assertThat(ObjectPath.eval("error.type", response), is("security_exception")); + assertThat(ObjectPath.eval("error.reason", response), is("token expired")); } } @@ -336,26 +354,27 @@ public void testDelegatePkiWithRoleMapping() throws Exception { DelegatePkiAuthenticationResponse delegatePkiResponse = restClient.security() .delegatePkiAuthentication(delegatePkiRequest, testUserOptions); // authenticate - AuthenticateResponse resp = restClient.security() - .authenticate( - RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + delegatePkiResponse.getAccessToken()).build() - ); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), is("Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_dn"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is("test_user")); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file")); + TestSecurityClient securityClient = getSecurityClient( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + delegatePkiResponse.getAccessToken()).build() + ); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + final Map metadata = assertMap(authenticateResponse, Fields.METADATA); + assertThat(metadata, hasEntry("pki_dn", "O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); + assertThat(metadata, hasEntry("pki_delegated_by_user", "test_user")); + assertThat(metadata, hasEntry("pki_delegated_by_realm", "file")); + // assert roles - assertThat(user.getRoles(), containsInAnyOrder("role_by_delegated_user", "role_by_delegated_realm")); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + List roles = assertList(authenticateResponse, Fields.ROLES); + assertThat(roles, containsInAnyOrder("role_by_delegated_user", "role_by_delegated_realm")); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + 
assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); + // delete role mappings for delegated PKI restClient.security() .deleteRoleMapping(new DeleteRoleMappingRequest("role_by_delegated_user", RefreshPolicy.IMMEDIATE), testUserOptions); @@ -364,6 +383,13 @@ public void testDelegatePkiWithRoleMapping() throws Exception { } } + private Object evaluate(Map map, ParseField... fields) { + for (int i = 0; i < fields.length - 1; i++) { + map = assertMap(map, fields[i]); + } + return map.get(fields[fields.length - 1]); + } + public void testIncorrectCertChain() throws Exception { X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt"); X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt"); @@ -417,4 +443,16 @@ private X509Certificate readCertForPkiDelegation(String certName) throws Excepti } } + @SuppressWarnings("unchecked") + private Map assertMap(Map map, ParseField field) { + final Object val = map.get(field.getPreferredName()); + assertThat("Field " + field + " of " + map, val, instanceOf(Map.class)); + return (Map) val; + } + + private List assertList(Map map, ParseField field) { + final Object val = map.get(field.getPreferredName()); + assertThat("Field " + field + " of " + map, val, instanceOf(List.class)); + return (List) val; + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java index 5fb12ce51946c..b924e0757dc40 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java @@ -17,10 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.security.DeleteRoleRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.PutUserResponse; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -31,7 +28,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.List; import java.util.Locale; import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; @@ -72,12 +68,7 @@ public void setupClusterBeforeSnapshot() throws IOException { final char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; final String snapshotUserToken = basicAuthHeaderValue(user, new SecureString(password)); client = client().filterWithHeader(Collections.singletonMap("Authorization", snapshotUserToken)); - PutUserResponse response = new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword(new User(user, List.of("snapshot_user")), password, true, RefreshPolicy.IMMEDIATE), - SECURITY_REQUEST_OPTIONS - ); - assertTrue(response.isCreated()); + getSecurityClient().putUser(new org.elasticsearch.xpack.core.security.user.User(user, "snapshot_user"), new SecureString(password)); 
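+ // putUser goes through the raw PUT /_security/user/{username} endpoint via the TestSecurityClient helper, so this setup no longer needs the high-level REST client's request/response types.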
ensureGreen(INTERNAL_SECURITY_MAIN_INDEX_7); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java new file mode 100644 index 0000000000000..bb5a873cf6a0e --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.profile; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.SecuritySingleNodeTestCase; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileResponse; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileRequest; +import org.elasticsearch.xpack.core.security.action.profile.GetProfilesResponse; +import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Set; + +import static org.elasticsearch.test.SecuritySettingsSource.TEST_PASSWORD_HASHED; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public abstract class AbstractProfileSingleNodeTestCase extends SecuritySingleNodeTestCase { + + protected static final String RAC_USER_NAME = "rac_user"; + protected static final String RAC_ROLE = "rac_role"; + protected static final SecureString NATIVE_RAC_USER_PASSWORD = new SecureString("native_rac_user_password".toCharArray()); + + // Needed for testing in IDE + @SuppressForbidden(reason = "sets the feature flag") + @BeforeClass + public static void enableFeature() { + AccessController.doPrivileged((PrivilegedAction) () -> System.setProperty("es.user_profile_feature_flag_enabled", "true")); + } + + @Override + protected Settings nodeSettings() { + final Settings.Builder builder = Settings.builder().put(super.nodeSettings()); + builder.put("xpack.security.authc.token.enabled", "true"); + return builder.build(); + } + + @Before + public void createNativeUsers() { + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE); + final 
String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(NATIVE_RAC_USER_PASSWORD)); + putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); + putUserRequest1.email(RAC_USER_NAME + "@example.com"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); + } + + @Override + protected String configUsers() { + return super.configUsers() + RAC_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n"; + } + + @Override + protected String configRoles() { + return super.configRoles() + "\n" + RAC_ROLE + ":\n" + " cluster:\n" + " - 'manage_own_api_key'\n" + " - 'monitor'\n"; + } + + @Override + protected String configUsersRoles() { + return super.configUsersRoles() + RAC_ROLE + ":" + RAC_USER_NAME + "\n"; + } + + protected Profile doActivateProfile(String username, SecureString password) { + // User and its access token should be associated to the same profile + return doActivateProfile(username, password, randomBoolean()); + } + + protected Profile doActivateProfile(String username, SecureString password, boolean useToken) { + final ActivateProfileRequest activateProfileRequest = new ActivateProfileRequest(); + if (useToken) { + final CreateTokenRequest createTokenRequest = new CreateTokenRequest("password", username, password.clone(), null, null, null); + final CreateTokenResponse createTokenResponse = client().execute(CreateTokenAction.INSTANCE, createTokenRequest).actionGet(); + activateProfileRequest.getGrant().setType("access_token"); + activateProfileRequest.getGrant().setAccessToken(new SecureString(createTokenResponse.getTokenString().toCharArray())); + } else { + activateProfileRequest.getGrant().setType("password"); + activateProfileRequest.getGrant().setUsername(username); + // clone the secureString because activate action closes it afterwards + activateProfileRequest.getGrant().setPassword(password.clone()); + } + + final ActivateProfileResponse activateProfileResponse = client().execute(ActivateProfileAction.INSTANCE, activateProfileRequest) + .actionGet(); + final Profile profile = activateProfileResponse.getProfile(); + assertThat(profile, notNullValue()); + assertThat(profile.user().username(), equalTo(username)); + assertThat(profile.applicationData(), anEmptyMap()); + return profile; + } + + protected Profile getProfile(String uid, Set dataKeys) { + final GetProfilesResponse getProfilesResponse = client().execute(GetProfileAction.INSTANCE, new GetProfileRequest(uid, dataKeys)) + .actionGet(); + assertThat(getProfilesResponse.getProfiles(), arrayWithSize(1)); + return getProfilesResponse.getProfiles()[0]; + } +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java new file mode 100644 index 0000000000000..2f939487f662e --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.profile; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationContext; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.RealmDomain; +import org.elasticsearch.xpack.core.security.authc.Subject; +import org.elasticsearch.xpack.core.security.user.User; + +import java.time.Instant; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_SECURITY_PROFILE_INDEX_8; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class ProfileDomainSingleNodeTests extends AbstractProfileSingleNodeTestCase { + + @Override + protected Settings nodeSettings() { + final Settings.Builder builder = Settings.builder().put(super.nodeSettings()); + // Register both file and native realms under the same domain + builder.put("xpack.security.authc.domains.my_domain.realms", "file,index"); + return builder.build(); + } + + public void testActivateProfileUnderDomain() { + // Activate 1st time with the file realm user + final Profile profile1 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); + assertThat(profile1.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile1.user().realmName(), equalTo("file")); + assertThat(profile1.user().domainName(), equalTo("my_domain")); + assertThat(profile1.user().email(), nullValue()); + assertThat(profile1.user().fullName(), nullValue()); + + // Get the profile back by ID + assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile1)); + + // Activate 2nd time with the native realm user and it should get the same profile + // because they are under the same domain. 
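+ // (the lookup in ProfileService matches any realm that belongs to my_domain, so both logins resolve to the same profile uid)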
User fields are updated to the native user's info + final Profile profile2 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile2.uid(), equalTo(profile1.uid())); + assertThat(profile2.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile2.user().realmName(), equalTo("index")); + assertThat(profile2.user().domainName(), equalTo("my_domain")); + assertThat(profile2.user().email(), equalTo(RAC_USER_NAME + "@example.com")); + assertThat(profile2.user().fullName(), nullValue()); + assertThat(profile2.user().roles(), containsInAnyOrder(RAC_ROLE)); + + // Activate 3rd time with the file realm user again and it should get the same profile + // User fields are updated to the file user's info again + final Profile profile3 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); + assertThat(profile3.uid(), equalTo(profile1.uid())); + assertThat(profile3.user().realmName(), equalTo("file")); + assertThat(profile3.user().domainName(), equalTo("my_domain")); + assertThat(profile3.user().email(), nullValue()); + assertThat(profile3.user().fullName(), nullValue()); + assertThat(profile3.user().roles(), containsInAnyOrder(RAC_ROLE)); + + // Update native rac user + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE, "superuser"); + putUserRequest1.email(null); + putUserRequest1.fullName("Native RAC User"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(false)); + + // Activate again with the native RAC user to the same profile + final Profile profile4 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile4.uid(), equalTo(profile1.uid())); + assertThat(profile4.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile4.user().realmName(), equalTo("index")); + assertThat(profile4.user().domainName(), equalTo("my_domain")); + assertThat(profile4.user().email(), nullValue()); + assertThat(profile4.user().fullName(), equalTo("Native RAC User")); + assertThat(profile4.user().roles(), containsInAnyOrder(RAC_ROLE, "superuser")); + + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile4)); + } + + public void testGetProfileByAuthenticationUnderDomain() { + final ProfileService profileService = node().injector().getInstance(ProfileService.class); + + final String nodeName = randomAlphaOfLengthBetween(3, 8); + final RealmConfig.RealmIdentifier realmIdentifier1 = new RealmConfig.RealmIdentifier("realm_type_1", "realm_name_1"); + final RealmConfig.RealmIdentifier realmIdentifier2 = new RealmConfig.RealmIdentifier("realm_type_2", "realm_name_2"); + + // Domain name does not matter + final String domainName = randomFrom("domainA", randomAlphaOfLengthBetween(5, 12)); + // The recorded realm is realm_name_1, domain realms must contain the recorded realm + final Set domainRealms = randomBoolean() + ? 
Set.of(realmIdentifier1, realmIdentifier2) + : Set.of(realmIdentifier1); + final RealmDomain realmDomain = new RealmDomain(domainName, domainRealms); + + final RealmConfig.RealmIdentifier authenticationRealmIdentifier = randomFrom(domainRealms); + + final Authentication authentication = new Authentication( + new User("foo"), + new Authentication.RealmRef( + authenticationRealmIdentifier.getName(), + authenticationRealmIdentifier.getType(), + nodeName, + realmDomain + ), + null + ); + final Subject subject = AuthenticationContext.fromAuthentication(authentication).getEffectiveSubject(); + + // Profile does not exist yet + final PlainActionFuture future1 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future1); + assertThat(future1.actionGet(), nullValue()); + + // Index the document so it can be found + // The document is created with realm_name_1 under domainA (member realms are realm_name_1 and realm_name_2) + final String uid2 = indexDocument(); + final PlainActionFuture future2 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future2); + final ProfileService.VersionedDocument versionedDocument = future2.actionGet(); + assertThat(versionedDocument, notNullValue()); + assertThat(versionedDocument.doc().uid(), equalTo(uid2)); + + // Index it again to trigger duplicate exception + final String uid3 = indexDocument(); + final PlainActionFuture future3 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future3); + final ElasticsearchException e3 = expectThrows(ElasticsearchException.class, future3::actionGet); + + assertThat( + e3.getMessage(), + containsString( + "multiple [2] profiles [" + Stream.of(uid2, uid3).sorted().collect(Collectors.joining(",")) + "] found for user [foo]" + ) + ); + } + + public void testGetProfileByAuthenticationDomainless() { + final ProfileService profileService = node().injector().getInstance(ProfileService.class); + // The document is created with realm_name_1 under domainA (member realms are realm_name_1 and realm_name_2) + final String uid1 = indexDocument(); + final String nodeName = randomAlphaOfLengthBetween(3, 8); + final RealmConfig.RealmIdentifier realmIdentifier1 = new RealmConfig.RealmIdentifier("realm_type_1", "realm_name_1"); + final RealmConfig.RealmIdentifier realmIdentifier2 = new RealmConfig.RealmIdentifier("realm_type_2", "realm_name_2"); + + // Scenario 1 + // The recorded realm_name_1 is no longer part of a domain. + // Authentication for this realm still works for retrieving the same profile document + final Authentication authentication1 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier1.getName(), realmIdentifier1.getType(), nodeName), + null + ); + final Subject subject1 = AuthenticationContext.fromAuthentication(authentication1).getEffectiveSubject(); + + final PlainActionFuture future1 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject1, future1); + final ProfileService.VersionedDocument versionedDocument1 = future1.actionGet(); + assertThat(versionedDocument1, notNullValue()); + assertThat(versionedDocument1.doc().uid(), equalTo(uid1)); + + // Scenario 2 + // The recorded realm_name_1 is no longer part of a domain. 
+ // Authentication for realm_name_2 (which is still part of domainA) does not work for retrieving the profile document + final RealmDomain realmDomain1 = new RealmDomain("domainA", Set.of(realmIdentifier2)); + final Authentication authentication2 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier2.getName(), realmIdentifier2.getType(), nodeName, realmDomain1), + null + ); + final Subject subject2 = AuthenticationContext.fromAuthentication(authentication2).getEffectiveSubject(); + + final PlainActionFuture future2 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject2, future2); + assertThat(future2.actionGet(), nullValue()); + + // Scenario 3 + // Both recorded realm_name_1 and the authentication realm_name_2 are no longer part of a domain. + final Authentication authentication3 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier2.getName(), realmIdentifier2.getType(), nodeName), + null + ); + final Subject subject3 = AuthenticationContext.fromAuthentication(authentication3).getEffectiveSubject(); + + final PlainActionFuture future3 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject3, future3); + assertThat(future3.actionGet(), nullValue()); + } + + private String indexDocument() { + final String uid = randomAlphaOfLength(20); + final String source = ProfileServiceTests.SAMPLE_PROFILE_DOCUMENT_TEMPLATE.formatted(uid, Instant.now().toEpochMilli()); + client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) + .setId("profile_" + uid) + .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + .setSource(source, XContentType.JSON) + .get(); + return uid; + } +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java index d0b441fb11501..c17cb13bec458 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java @@ -7,84 +7,50 @@ package org.elasticsearch.xpack.security.profile; -import org.elasticsearch.ElasticsearchException; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.test.SecuritySingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileResponse; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfileRequest; import 
org.elasticsearch.xpack.core.security.action.profile.GetProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.user.User; -import org.junit.BeforeClass; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.time.Instant; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; -import static org.elasticsearch.test.SecuritySettingsSource.TEST_PASSWORD_HASHED; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_SECURITY_PROFILE_INDEX_8; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; -import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItemInArray; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class ProfileSingleNodeTests extends SecuritySingleNodeTestCase { - - private static final String RAC_USER_NAME = "rac_user"; - - // Needed for testing in IDE - @SuppressForbidden(reason = "sets the feature flag") - @BeforeClass - public static void enableFeature() { - AccessController.doPrivileged((PrivilegedAction) () -> System.setProperty("es.user_profile_feature_flag_enabled", "true")); - } - - @Override - protected String configUsers() { - return super.configUsers() + RAC_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n"; - } - - @Override - protected String configRoles() { - return super.configRoles() + "rac_role:\n" + " cluster:\n" + " - 'manage_own_api_key'\n" + " - 'monitor'\n"; - } - - @Override - protected String configUsersRoles() { - return super.configUsersRoles() + "rac_role:" + RAC_USER_NAME + "\n"; - } +public class ProfileSingleNodeTests extends AbstractProfileSingleNodeTestCase { @Override protected Settings nodeSettings() { @@ -95,19 +61,17 @@ protected Settings nodeSettings() { } public void testProfileIndexAutoCreation() { + // Index does not exist yet + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Trigger index creation by indexing var 
indexResponse = client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) .setSource(Map.of("user_profile", Map.of("uid", randomAlphaOfLength(22)))) .get(); - assertThat(indexResponse.status().getStatus(), equalTo(201)); - var getIndexRequest = new GetIndexRequest(); - getIndexRequest.indices(INTERNAL_SECURITY_PROFILE_INDEX_8); - - var getIndexResponse = client().execute(GetIndexAction.INSTANCE, getIndexRequest).actionGet(); - - assertThat(getIndexResponse.getIndices(), arrayContaining(INTERNAL_SECURITY_PROFILE_INDEX_8)); - + final GetIndexResponse getIndexResponse = getProfileIndexResponse(); + assertThat(getIndexResponse.getIndices(), hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8)); var aliases = getIndexResponse.getAliases().get(INTERNAL_SECURITY_PROFILE_INDEX_8); assertThat(aliases, hasSize(1)); assertThat(aliases.get(0).alias(), equalTo(SECURITY_PROFILE_ALIAS)); @@ -131,70 +95,35 @@ public void testProfileIndexAutoCreation() { assertThat(userProfileProperties.keySet(), hasItems("uid", "enabled", "last_synchronized", "user", "access", "application_data")); } - public void testGetProfileByAuthentication() { - final ProfileService profileService = node().injector().getInstance(ProfileService.class); - final Authentication authentication = new Authentication( - new User("foo"), - new Authentication.RealmRef("realm_name_1", "realm_type_1", randomAlphaOfLengthBetween(3, 8)), - null - ); - - // Profile does not exist yet - final PlainActionFuture future1 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future1); - assertThat(future1.actionGet(), nullValue()); - - // Index the document so it can be found - final String uid2 = indexDocument(); - final PlainActionFuture future2 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future2); - final ProfileService.VersionedDocument versionedDocument = future2.actionGet(); - assertThat(versionedDocument, notNullValue()); - assertThat(versionedDocument.doc().uid(), equalTo(uid2)); - - // Index it again to trigger duplicate exception - final String uid3 = indexDocument(); - final PlainActionFuture future3 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future3); - final ElasticsearchException e3 = expectThrows(ElasticsearchException.class, future3::actionGet); - - assertThat( - e3.getMessage(), - containsString( - "multiple [2] profiles [" + Stream.of(uid2, uid3).sorted().collect(Collectors.joining(",")) + "] found for user [foo]" - ) - ); - } - public void testActivateProfile() { final Profile profile1 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); assertThat(profile1.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile1.user().roles(), contains(RAC_ROLE)); + assertThat(profile1.user().realmName(), equalTo("file")); + assertThat(profile1.user().domainName(), equalTo("my_domain")); assertThat(profile1.user().email(), nullValue()); assertThat(profile1.user().fullName(), nullValue()); - + // Get by ID immediately should get the same document and content as the response to activate assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile1)); // activate again should be getting the same profile final Profile profile2 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); assertThat(profile2.uid(), equalTo(profile1.uid())); - - // Create another rac user in the native realm - final PutUserRequest putUserRequest1 = new PutUserRequest(); - 
putUserRequest1.username(RAC_USER_NAME); - putUserRequest1.roles("rac_role"); - final SecureString nativeRacUserPassword = new SecureString("native_rac_user_password".toCharArray()); - final String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(nativeRacUserPassword)); - putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); - putUserRequest1.email(RAC_USER_NAME + "@example.com"); - assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); - - // Since file and native realms are not in the same domain yet, the new profile should be a different one - final Profile profile3 = doActivateProfile(RAC_USER_NAME, nativeRacUserPassword); - assertThat(profile3.uid(), not(equalTo(profile1.uid()))); + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile2.uid(), Set.of()), equalTo(profile2)); + + // Since file and native realms are not in the same domain, the new profile must be a different one + final Profile profile3 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile3.uid(), not(equalTo(profile1.uid()))); // NOT the same profile as the file user + assertThat(profile3.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile3.user().realmName(), equalTo("index")); + assertThat(profile3.user().domainName(), nullValue()); assertThat(profile3.user().email(), equalTo(RAC_USER_NAME + "@example.com")); assertThat(profile3.user().fullName(), nullValue()); - assertThat(profile3.user().roles(), containsInAnyOrder("rac_role")); + assertThat(profile3.user().roles(), contains(RAC_ROLE)); assertThat(profile3.access(), anEmptyMap()); + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile3.uid(), Set.of()), equalTo(profile3)); // Manually inserting some application data client().prepareUpdate(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS), "profile_" + profile3.uid()) @@ -224,21 +153,23 @@ public void testActivateProfile() { assertThat(profile4.applicationData(), equalTo(Map.of("my_app", Map.of("theme", "default")))); // Update native rac user - final PutUserRequest putUserRequest2 = new PutUserRequest(); - putUserRequest2.username(RAC_USER_NAME); - putUserRequest2.roles("rac_role", "superuser"); - putUserRequest2.email(null); - putUserRequest2.fullName("Native RAC User"); - assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest2).actionGet().created(), is(false)); + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE, "superuser"); + putUserRequest1.email(null); + putUserRequest1.fullName("Native RAC User"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(false)); // Activate again should see the updated user info - final Profile profile5 = doActivateProfile(RAC_USER_NAME, nativeRacUserPassword); + final Profile profile5 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); assertThat(profile5.uid(), equalTo(profile3.uid())); assertThat(profile5.user().email(), nullValue()); assertThat(profile5.user().fullName(), equalTo("Native RAC User")); - assertThat(profile5.user().roles(), containsInAnyOrder("rac_role", "superuser")); + assertThat(profile5.user().roles(), containsInAnyOrder(RAC_ROLE, "superuser")); // Re-activate should not change access 
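+ // (access and application data change only through explicit updates; re-activation refreshes just the user info, as the assertions below verify)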
assertThat(profile5.access(), equalTo(Map.of("my_app", Map.of("tag", "prod")))); + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile5.uid(), Set.of()), equalTo(profile5)); // Re-activate should not change application data assertThat(getProfile(profile5.uid(), Set.of("my_app")).applicationData(), equalTo(Map.of("my_app", Map.of("theme", "default")))); } @@ -302,37 +233,110 @@ public void testUpdateProfileData() { ); } - private Profile doActivateProfile(String username, SecureString password) { - final ActivateProfileRequest activateProfileRequest = new ActivateProfileRequest(); - activateProfileRequest.getGrant().setType("password"); - activateProfileRequest.getGrant().setUsername(username); - // clone the secureString because activate action closes it afterwards - activateProfileRequest.getGrant().setPassword(password.clone()); + public void testSearchProfiles() { + final String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(NATIVE_RAC_USER_PASSWORD)); + final Map users = Map.of( + "user_foo", + "Very Curious User Foo", + "user_bar", + "Super Curious Admin Bar", + "user_baz", + "Very Anxious User Baz", + "user_qux", + "Super Anxious Admin Qux" + ); + users.forEach((key, value) -> { + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(key); + putUserRequest1.fullName(value); + putUserRequest1.roles("rac_role"); + putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); + doActivateProfile(key, NATIVE_RAC_USER_PASSWORD); + }); + + final SearchProfilesResponse.ProfileHit[] profiles1 = doSearch(""); + assertThat(extractUsernames(profiles1), equalTo(users.keySet())); + + final SearchProfilesResponse.ProfileHit[] profiles2 = doSearch(randomFrom("super admin", "admin super")); + assertThat(extractUsernames(profiles2), equalTo(Set.of("user_bar", "user_qux"))); + + // Prefix match on full name + final SearchProfilesResponse.ProfileHit[] profiles3 = doSearch("ver"); + assertThat(extractUsernames(profiles3), equalTo(Set.of("user_foo", "user_baz"))); + + // Prefix match on the username + final SearchProfilesResponse.ProfileHit[] profiles4 = doSearch("user"); + assertThat(extractUsernames(profiles4), equalTo(users.keySet())); + // Documents scored higher are those with matches in more fields + assertThat(extractUsernames(Arrays.copyOfRange(profiles4, 0, 2)), equalTo(Set.of("user_foo", "user_baz"))); + + // Match of different terms on different fields + final SearchProfilesResponse.ProfileHit[] profiles5 = doSearch(randomFrom("admin very", "very admin")); + assertThat(extractUsernames(profiles5), equalTo(users.keySet())); + } - final ActivateProfileResponse activateProfileResponse = client().execute(ActivateProfileAction.INSTANCE, activateProfileRequest) - .actionGet(); - final Profile profile = activateProfileResponse.getProfile(); - assertThat(profile, notNullValue()); - assertThat(profile.user().username(), equalTo(username)); - assertThat(profile.applicationData(), anEmptyMap()); - return profile; + public void testProfileAPIsWhenIndexNotCreated() { + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Get Profile by ID returns empty result + final GetProfilesResponse getProfilesResponse = client().execute( + GetProfileAction.INSTANCE, + new 
GetProfileRequest(randomAlphaOfLength(20), Set.of()) + ).actionGet(); + assertThat(getProfilesResponse.getProfiles(), arrayWithSize(0)); + + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Search returns empty result + final SearchProfilesResponse.ProfileHit[] profiles1 = doSearch(""); + assertThat(profiles1, emptyArray()); + + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Updating profile data results into doc missing exception + // But the index is created in the process + final DocumentMissingException e1 = expectThrows( + DocumentMissingException.class, + () -> client().execute( + UpdateProfileDataAction.INSTANCE, + new UpdateProfileDataRequest( + randomAlphaOfLength(20), + null, + Map.of(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8)), + -1, + -1, + WriteRequest.RefreshPolicy.WAIT_UNTIL + ) + ).actionGet() + ); + + // TODO: The index is created after the update call regardless. Should it not do that? + assertThat(getProfileIndexResponse().getIndices(), hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8)); } - private Profile getProfile(String uid, Set dataKeys) { - final GetProfilesResponse getProfilesResponse = client().execute(GetProfileAction.INSTANCE, new GetProfileRequest(uid, dataKeys)) + private SearchProfilesResponse.ProfileHit[] doSearch(String query) { + final SearchProfilesRequest searchProfilesRequest = new SearchProfilesRequest(Set.of(), query, 10); + final SearchProfilesResponse searchProfilesResponse = client().execute(SearchProfilesAction.INSTANCE, searchProfilesRequest) .actionGet(); - assertThat(getProfilesResponse.getProfiles(), arrayWithSize(1)); - return getProfilesResponse.getProfiles()[0]; + assertThat(searchProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + return searchProfilesResponse.getProfileHits(); } - private String indexDocument() { - final String uid = randomAlphaOfLength(20); - final String source = ProfileServiceTests.SAMPLE_PROFILE_DOCUMENT_TEMPLATE.formatted(uid, Instant.now().toEpochMilli()); - client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) - .setId("profile_" + uid) - .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) - .setSource(source, XContentType.JSON) - .get(); - return uid; + private Set extractUsernames(SearchProfilesResponse.ProfileHit[] profileHits) { + return Arrays.stream(profileHits) + .map(SearchProfilesResponse.ProfileHit::profile) + .map(Profile::user) + .map(Profile.ProfileUser::username) + .collect(Collectors.toUnmodifiableSet()); + } + + private GetIndexResponse getProfileIndexResponse() { + final GetIndexRequest getIndexRequest = new GetIndexRequest(); + getIndexRequest.indices(".*"); + return client().execute(GetIndexAction.INSTANCE, getIndexRequest).actionGet(); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 06af48c70471f..c249283511975 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -110,6 +110,7 @@ import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import 
org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; @@ -186,6 +187,7 @@ import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; import org.elasticsearch.xpack.security.action.profile.TransportActivateProfileAction; import org.elasticsearch.xpack.security.action.profile.TransportGetProfileAction; +import org.elasticsearch.xpack.security.action.profile.TransportSearchProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportUpdateProfileDataAction; import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; import org.elasticsearch.xpack.security.action.role.TransportClearRolesCacheAction; @@ -280,6 +282,7 @@ import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestActivateProfileAction; import org.elasticsearch.xpack.security.rest.action.profile.RestGetProfileAction; +import org.elasticsearch.xpack.security.rest.action.profile.RestSearchProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestUpdateProfileDataAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; @@ -1216,7 +1219,8 @@ public void onIndexModule(IndexModule module) { Stream.of( new ActionHandler<>(GetProfileAction.INSTANCE, TransportGetProfileAction.class), new ActionHandler<>(ActivateProfileAction.INSTANCE, TransportActivateProfileAction.class), - new ActionHandler<>(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class) + new ActionHandler<>(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class), + new ActionHandler<>(SearchProfilesAction.INSTANCE, TransportSearchProfilesAction.class) ) ).toList(); } else { @@ -1301,7 +1305,8 @@ public List getRestHandlers( Stream.of( new RestGetProfileAction(settings, getLicenseState()), new RestActivateProfileAction(settings, getLicenseState()), - new RestUpdateProfileDataAction(settings, getLicenseState()) + new RestUpdateProfileDataAction(settings, getLicenseState()), + new RestSearchProfilesAction(settings, getLicenseState()) ) ).toList(); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java index 33404ba39b86f..2dd7de7d9aba2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java @@ -30,6 +30,6 @@ public TransportGetProfileAction(TransportService transportService, ActionFilter @Override protected void doExecute(Task task, GetProfileRequest request, ActionListener listener) { - profileService.getProfile(request.getUid(), request.getDatKeys(), listener.map(GetProfilesResponse::new)); + 
profileService.getProfile(request.getUid(), request.getDataKeys(), listener.map(GetProfilesResponse::new)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java new file mode 100644 index 0000000000000..9a0d022d804e0 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.action.profile; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; +import org.elasticsearch.xpack.security.profile.ProfileService; + +public class TransportSearchProfilesAction extends HandledTransportAction<SearchProfilesRequest, SearchProfilesResponse> { + + private final ProfileService profileService; + + @Inject + public TransportSearchProfilesAction(TransportService transportService, ActionFilters actionFilters, ProfileService profileService) { + super(SearchProfilesAction.NAME, transportService, actionFilters, SearchProfilesRequest::new); + this.profileService = profileService; + } + + @Override + protected void doExecute(Task task, SearchProfilesRequest request, ActionListener<SearchProfilesResponse> listener) { + profileService.searchProfile(request, listener); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index 43cee77e76c26..d8679a74f361b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -22,6 +22,35 @@ final class ElasticServiceAccounts { static final String NAMESPACE = "elastic"; + private static final ServiceAccount ENTERPRISE_SEARCH_ACCOUNT = new ElasticServiceAccount( + "enterprise-search-server", + new RoleDescriptor( + NAMESPACE + "/enterprise-search-server", + new String[] { "manage", "manage_security" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices( + ".ent-search-*", + ".monitoring-ent-search-*", + "metricbeat-ent-search-*", + "enterprise-search-*", + "logs-app_search.analytics-default", + "logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + "logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ) + .privileges("manage", "read", "write") + .build() }, + null, + null, + null, + null, + null + ) + ); + private static final
ServiceAccount FLEET_ACCOUNT = new ElasticServiceAccount( "fleet-server", new RoleDescriptor( @@ -71,7 +100,7 @@ final class ElasticServiceAccounts { ReservedRolesStore.kibanaSystemRoleDescriptor(NAMESPACE + "/kibana") ); - static final Map ACCOUNTS = List.of(FLEET_ACCOUNT, KIBANA_SYSTEM_ACCOUNT) + static final Map ACCOUNTS = List.of(ENTERPRISE_SEARCH_ACCOUNT, FLEET_ACCOUNT, KIBANA_SYSTEM_ACCOUNT) .stream() .collect(Collectors.toMap(a -> a.id().asPrincipal(), Function.identity())); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java index f2ad6c6fce35f..80062d4f77344 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParserHelper; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -30,6 +29,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.core.security.authc.Authentication.REALM_REF_PARSER; public record ProfileDocument( String uid, @@ -55,17 +55,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("user"); builder.field("username", username); builder.field("roles", roles); - builder.startObject("realm"); - builder.field("name", realm.getName()); - builder.field("type", realm.getType()); - builder.field("node_name", realm.getNodeName()); - builder.endObject(); - if (email != null) { - builder.field("email", email); - } - if (fullName != null) { - builder.field("full_name", fullName); - } + builder.field("realm", realm); + builder.field("email", email); + builder.field("full_name", fullName); if (displayName != null) { builder.field("display_name", displayName); } @@ -74,8 +66,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public Profile.ProfileUser toProfileUser(@Nullable String realmDomain) { - return new Profile.ProfileUser(username, roles, realm.getName(), realmDomain, email, fullName, displayName, active); + public Profile.ProfileUser toProfileUser() { + final String domainName = realm.getDomain() != null ? realm.getDomain().name() : null; + return new Profile.ProfileUser(username, roles, realm.getName(), domainName, email, fullName, displayName, active); } } @@ -161,27 +154,13 @@ public static ProfileDocument fromXContent(XContentParser parser) { (args, v) -> (ProfileDocument) args[0] ); - // TODO:This is a copy from Authentication class. This version ignores unknown fields so that it currently ignores the domain field - // The support will be added later when authentication update is finalised. 
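For context before the removal below: the profile-local parser duplicated the lenient ConstructingObjectParser pattern so it could skip the then-unsupported domain field. Here is a self-contained sketch of that pattern; the RealmRef record and class name are illustrative stand-ins, while the real code builds Authentication.RealmRef:

```java
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

public class LenientRealmRefParser {
    // Illustrative stand-in for Authentication.RealmRef
    public record RealmRef(String name, String type, String nodeName) {}

    // `true` means "ignore unknown fields": this is what let the local copy skip
    // the `domain` object before domain support was finalised.
    public static final ConstructingObjectParser<RealmRef, Void> REALM_REF_PARSER = new ConstructingObjectParser<>(
        "realm_ref",
        true,
        (args, v) -> new RealmRef((String) args[0], (String) args[1], (String) args[2])
    );

    static {
        REALM_REF_PARSER.declareString(constructorArg(), new ParseField("name"));
        REALM_REF_PARSER.declareString(constructorArg(), new ParseField("type"));
        REALM_REF_PARSER.declareString(constructorArg(), new ParseField("node_name"));
    }
}
```

Centralizing on Authentication.REALM_REF_PARSER removes this duplication and brings domain parsing along with it, which is what the deletion below accomplishes.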
- public static ConstructingObjectParser REALM_REF_PARSER = new ConstructingObjectParser<>( - "realm_ref", - true, - (args, v) -> new Authentication.RealmRef((String) args[0], (String) args[1], (String) args[2]) - ); - - static { - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("name")); - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("type")); - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("node_name")); - } - static { PROFILE_DOC_USER_PARSER.declareString(constructorArg(), new ParseField("username")); PROFILE_DOC_USER_PARSER.declareStringArray(constructorArg(), new ParseField("roles")); - PROFILE_DOC_USER_PARSER.declareObject(constructorArg(), (p, c) -> REALM_REF_PARSER.parse(p, null), new ParseField("realm")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("email")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("full_name")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("display_name")); + PROFILE_DOC_USER_PARSER.declareObject(constructorArg(), (p, c) -> REALM_REF_PARSER.parse(p, c), new ParseField("realm")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("email")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("full_name")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("display_name")); PROFILE_DOC_USER_PARSER.declareBoolean(constructorArg(), new ParseField("active")); PROFILE_DOC_PARSER.declareString(constructorArg(), new ParseField("uid")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index 0267d14402fee..a3a13aadd25d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; @@ -28,13 +29,17 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -43,6 +48,8 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.profile.Profile; +import 
org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationContext; @@ -84,10 +91,10 @@ public ProfileService(Settings settings, Clock clock, Client client, SecurityInd } public void getProfile(String uid, @Nullable Set dataKeys, ActionListener listener) { - getVersionedDocument(uid, listener.map(versionedDocument -> { - // TODO: replace null with actual domain lookup - return versionedDocument != null ? versionedDocument.toProfile(null, dataKeys) : null; - })); + getVersionedDocument( + uid, + listener.map(versionedDocument -> versionedDocument != null ? versionedDocument.toProfile(dataKeys) : null) + ); } // TODO: with request when we take request body for profile activation @@ -119,7 +126,7 @@ public void activateProfile(Authentication authentication, ActionListener { + getVersionedDocument(subject, ActionListener.wrap(versionedDocument -> { if (versionedDocument == null) { createNewProfile(subject, listener); } else { @@ -159,6 +166,72 @@ public void updateProfileData(UpdateProfileDataRequest request, ActionListener listener) { + tryFreezeAndCheckIndex(listener.map(response -> { + assert response == null : "only null response can reach here"; + return new SearchProfilesResponse(new SearchProfilesResponse.ProfileHit[] {}, 0, new TotalHits(0, TotalHits.Relation.EQUAL_TO)); + })).ifPresent(frozenProfileIndex -> { + final BoolQueryBuilder query = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("user_profile.enabled", true)); + if (Strings.hasText(request.getName())) { + query.must( + QueryBuilders.multiMatchQuery( + request.getName(), + "user_profile.user.username", + "user_profile.user.username._2gram", + "user_profile.user.username._3gram", + "user_profile.user.full_name", + "user_profile.user.full_name._2gram", + "user_profile.user.full_name._3gram", + "user_profile.user.display_name", + "user_profile.user.display_name._2gram", + "user_profile.user.display_name._3gram" + ).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX) + ); + } + final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS) + .setQuery(query) + .setSize(request.getSize()) + .addSort("_score", SortOrder.DESC) + .addSort("user_profile.last_synchronized", SortOrder.DESC) + .request(); + + frozenProfileIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client, + SECURITY_ORIGIN, + SearchAction.INSTANCE, + searchRequest, + ActionListener.wrap(searchResponse -> { + final SearchHits searchHits = searchResponse.getHits(); + final SearchHit[] hits = searchHits.getHits(); + final SearchProfilesResponse.ProfileHit[] profileHits; + if (hits.length == 0) { + profileHits = new SearchProfilesResponse.ProfileHit[0]; + } else { + profileHits = new SearchProfilesResponse.ProfileHit[hits.length]; + for (int i = 0; i < hits.length; i++) { + final SearchHit hit = hits[i]; + final VersionedDocument versionedDocument = new VersionedDocument( + buildProfileDocument(hit.getSourceRef()), + hit.getPrimaryTerm(), + hit.getSeqNo() + ); + profileHits[i] = new SearchProfilesResponse.ProfileHit( + versionedDocument.toProfile(request.getDataKeys()), + hit.getScore() + ); + } + } + listener.onResponse( + new SearchProfilesResponse(profileHits, 
searchResponse.getTook().millis(), searchHits.getTotalHits()) + ); + }, listener::onFailure) + ) + ); + }); + } + private void getVersionedDocument(String uid, ActionListener listener) { tryFreezeAndCheckIndex(listener).ifPresent(frozenProfileIndex -> { final GetRequest getRequest = new GetRequest(SECURITY_PROFILE_ALIAS, uidToDocId(uid)); @@ -183,16 +256,33 @@ private void getVersionedDocument(String uid, ActionListener } // Package private for testing - void getVersionedDocument(Authentication authentication, ActionListener listener) { + void getVersionedDocument(Subject subject, ActionListener listener) { tryFreezeAndCheckIndex(listener).ifPresent(frozenProfileIndex -> { - final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS) - .setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("user_profile.user.username", authentication.getUser().principal())) - // TODO: this will be replaced by domain lookup and reverse lookup - .must(QueryBuilders.termQuery("user_profile.user.realm.name", authentication.getSourceRealm().getName())) - ) - .request(); + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("user_profile.user.username", subject.getUser().principal())); + if (subject.getRealm().getDomain() == null) { + boolQuery.filter(QueryBuilders.termQuery("user_profile.user.realm.name", subject.getRealm().getName())) + .filter(QueryBuilders.termQuery("user_profile.user.realm.type", subject.getRealm().getType())); + } else { + logger.debug( + () -> new ParameterizedMessage( + "searching existing profile document for user [{}] from any of the realms [{}] under domain [{}]", + subject.getUser().principal(), + Strings.collectionToCommaDelimitedString(subject.getRealm().getDomain().realms()), + subject.getRealm().getDomain().name() + ) + ); + subject.getRealm().getDomain().realms().forEach(realmIdentifier -> { + boolQuery.should( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("user_profile.user.realm.name", realmIdentifier.getName())) + .filter(QueryBuilders.termQuery("user_profile.user.realm.type", realmIdentifier.getType())) + ); + }); + boolQuery.minimumShouldMatch(1); + } + + final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS).setQuery(boolQuery).request(); frozenProfileIndex.checkIndexVersionThenExecute( listener::onFailure, () -> executeAsyncWithOrigin( @@ -206,8 +296,8 @@ void getVersionedDocument(Authentication authentication, ActionListener listener) TransportSingleItemBulkWriteAction.wrapBulkResponse(ActionListener.wrap(indexResponse -> { assert docId.equals(indexResponse.getId()); // TODO: replace with actual domain information - listener.onResponse( - new VersionedDocument(profileDocument, indexResponse.getPrimaryTerm(), indexResponse.getSeqNo()).toProfile(null) + final VersionedDocument versionedDocument = new VersionedDocument( + profileDocument, + indexResponse.getPrimaryTerm(), + indexResponse.getSeqNo() ); + listener.onResponse(versionedDocument.toProfile(Set.of())); }, listener::onFailure)) ) ); @@ -274,7 +370,7 @@ private void updateProfileForActivate(Subject subject, VersionedDocument version ), listener.map( updateResponse -> new VersionedDocument(profileDocument, updateResponse.getPrimaryTerm(), updateResponse.getSeqNo()) - .toProfile(null) + .toProfile(Set.of()) ) ); } @@ -360,7 +456,7 @@ private XContentBuilder wrapProfileDocumentWithoutApplicationData(ProfileDocumen /** * Freeze the profile index check its availability and return it if everything is 
ok. - * Otherwise it returns null. + * Otherwise it calls the listener with null and returns an empty Optional. */ private Optional tryFreezeAndCheckIndex(ActionListener listener) { final SecurityIndexManager frozenProfileIndex = profileIndex.freeze(); @@ -400,14 +496,13 @@ private ProfileDocument updateWithSubject(ProfileDocument doc, Subject subject) // Package private for testing record VersionedDocument(ProfileDocument doc, long primaryTerm, long seqNo) { - Profile toProfile(@Nullable String realmDomain) { - return toProfile(realmDomain, Set.of()); - } - - Profile toProfile(@Nullable String realmDomain, @Nullable Set dataKeys) { + /** + * Convert the index document to the user-facing Profile by filtering through the application data + */ + Profile toProfile(Set dataKeys) { + assert dataKeys != null : "data keys must not be null"; final Map applicationData; - // NOTE null is the same as empty which means not retrieving any application data - if (dataKeys == null || dataKeys.isEmpty()) { + if (dataKeys.isEmpty()) { applicationData = Map.of(); } else { applicationData = XContentHelper.convertToMap(doc.applicationData(), false, XContentType.JSON, dataKeys, null).v2(); @@ -417,7 +512,7 @@ Profile toProfile(@Nullable String realmDomain, @Nullable Set dataKeys) doc.uid(), doc.enabled(), doc.lastSynchronized(), - doc.user().toProfileUser(realmDomain), + doc.user().toProfileUser(), doc.access(), applicationData, new Profile.VersionControl(primaryTerm, seqNo) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java new file mode 100644 index 0000000000000..51f4f2a6709ad --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rest.action.profile; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class RestSearchProfilesAction extends SecurityBaseRestHandler { + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "search_profile_request_payload", + a -> new Payload((String) a[0], (Integer) a[1]) + ); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("name")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); + } + + public RestSearchProfilesAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_security/profile/_search"), new Route(POST, "/_security/profile/_search")); + } + + @Override + public String getName() { + return "xpack_security_search_profile"; + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final Set dataKeys = Strings.tokenizeByCommaToSet(request.param("data", null)); + final Payload payload = request.hasContent() ? PARSER.parse(request.contentParser(), null) : new Payload(null, null); + + final SearchProfilesRequest searchProfilesRequest = new SearchProfilesRequest(dataKeys, payload.name(), payload.size()); + return channel -> client.execute(SearchProfilesAction.INSTANCE, searchProfilesRequest, new RestToXContentListener<>(channel)); + } + + record Payload(String name, Integer size) { + + public String name() { + return name != null ? name : ""; + } + + public Integer size() { + return size != null ? 
size : 10; + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java index d4b1268c4334d..726744cc7b684 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java @@ -24,12 +24,10 @@ import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; import java.io.IOException; -import java.util.Collections; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -82,7 +80,11 @@ public RestResponse buildResponse(GetUserPrivilegesResponse response, XContentBu builder.field(RoleDescriptor.Fields.CLUSTER.getPreferredName(), response.getClusterPrivileges()); builder.startArray(RoleDescriptor.Fields.GLOBAL.getPreferredName()); for (ConfigurableClusterPrivilege ccp : response.getConditionalClusterPrivileges()) { - ConfigurableClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, Collections.singleton(ccp)); + builder.startObject(); + builder.startObject(ccp.getCategory().field.getPreferredName()); + ccp.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + builder.endObject(); } builder.endArray(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 27d509c6daabd..b48dcd31cd54a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -295,6 +295,27 @@ private XContentBuilder getMainIndexMappings() { builder.endObject(); } builder.endObject(); + builder.startObject("profile"); + { + builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("write"); + { + builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("applications"); + builder.field("type", "keyword"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); } builder.endObject(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 466b85a839ffe..3b327240069a0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import 
org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.node.NodeClient; @@ -75,6 +76,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { * to how {@link ESIntegTestCase#nodeSettings(int, Settings)} works. */ private static CustomSecuritySettingsSource customSecuritySettingsSource = null; + private TestSecurityClient securityClient; @BeforeClass public static void generateBootstrapPassword() { @@ -469,4 +471,15 @@ public TestRestHighLevelClient() { super(getRestClient(), client -> {}, List.of()); } } + + protected TestSecurityClient getSecurityClient(RequestOptions requestOptions) { + return new TestSecurityClient(getRestClient(), requestOptions); + } + + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = getSecurityClient(SecuritySettingsSource.SECURITY_REQUEST_OPTIONS); + } + return securityClient; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java new file mode 100644 index 0000000000000..2530c4d1675d4 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.test; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.rest.ESRestTestCase.entityAsMap; + +public class TestSecurityClient { + + private final RestClient client; + private final RequestOptions options; + + public TestSecurityClient(RestClient client) { + this(client, RequestOptions.DEFAULT); + } + + public TestSecurityClient(RestClient client, RequestOptions options) { + this.client = client; + this.options = options; + } + + /** + * Uses the REST API to retrieve the currently authenticated user. + * @see User.Fields + * @see org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction + */ + public Map authenticate() throws IOException { + final String endpoint = "/_security/_authenticate"; + final Request request = new Request(HttpGet.METHOD_NAME, endpoint); + return entityAsMap(execute(request)); + } + + /** + * Uses the REST API to create a new user in the native realm. 
+ * @see org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction + */ + public void putUser(User user, SecureString password) throws IOException { + final String endpoint = "/_security/user/" + user.principal(); + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + final Map map = XContentTestUtils.convertToMap(user); + if (password != null) { + map.put("password", password.toString()); + } + final String body = toJson(map); + request.setJsonEntity(body); + request.addParameters(Map.of("refresh", "true")); + execute(request); + } + + /** + * Uses the REST API to delete a user from the native realm. + * @see org.elasticsearch.xpack.security.rest.action.user.RestDeleteUserAction + */ + public void deleteUser(String username) throws IOException { + final String endpoint = "/_security/user/" + username; + final Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + request.addParameters(Map.of("refresh", "true")); + execute(request); + } + + /** + * Uses the REST API to change the password of a user in the native/reserved realms. + * @see org.elasticsearch.xpack.security.rest.action.user.RestChangePasswordAction + */ + public void changePassword(String username, SecureString password) throws IOException { + final String endpoint = "/_security/user/" + username + "/_password"; + final Request request = new Request(HttpPost.METHOD_NAME, endpoint); + final String body = """ + { + "password": "%s" + } + """.formatted(password.toString()); + request.setJsonEntity(body); + execute(request); + } + + /** + * Uses the REST API to enable or disable a user in the native/reserved realm. + * @see org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction + */ + public void setUserEnabled(String username, boolean enabled) throws IOException { + final String endpoint = "/_security/user/" + username + "/" + (enabled ? 
"_enable" : "_disable"); + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + execute(request); + } + + private static String toJson(Map map) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder().map(map); + final BytesReference bytes = BytesReference.bytes(builder); + return bytes.utf8ToString(); + } + + private Response execute(Request request) throws IOException { + request.setOptions(options); + return this.client.performRequest(request); + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java index 04432131fc9ff..87ef1ea24559e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java @@ -45,12 +45,12 @@ public void testDoExecute() { final PlainActionFuture future1 = new PlainActionFuture<>(); transportGetServiceAccountAction.doExecute(mock(Task.class), request1, future1); final GetServiceAccountResponse getServiceAccountResponse1 = future1.actionGet(); - assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(2)); + assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(3)); assertThat( Arrays.stream(getServiceAccountResponse1.getServiceAccountInfos()) .map(ServiceAccountInfo::getPrincipal) .collect(Collectors.toList()), - containsInAnyOrder("elastic/fleet-server", "elastic/kibana") + containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/kibana") ); final GetServiceAccountRequest request2 = new GetServiceAccountRequest("elastic", "fleet-server"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index cc16043697a55..76989a969b273 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -135,6 +135,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -538,6 +539,18 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException metaMap, Map.of("ignored", 2) ); + RoleDescriptor roleDescriptor5 = new RoleDescriptor( + "role_descriptor5", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[0], + randomFrom((RoleDescriptor.ApplicationResourcePrivileges[]) null, new RoleDescriptor.ApplicationResourcePrivileges[0]), + new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("", "\""))), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Set.of("\"")) }, + new String[] { "\"[a]/" }, + Map.of(), + Map.of() + ); String keyName = randomAlphaOfLength(4); TimeValue expiration = randomFrom(new TimeValue(randomNonNegativeLong(), randomFrom(TimeUnit.values())), null); List allTestRoleDescriptors = List.of( @@ 
-545,7 +558,8 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException roleDescriptor1, roleDescriptor2, roleDescriptor3, - roleDescriptor4 + roleDescriptor4, + roleDescriptor5 ); List keyRoleDescriptors = randomSubsetOf(allTestRoleDescriptors); StringBuilder roleDescriptorsStringBuilder = new StringBuilder().append("\"role_descriptors\":["); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java index 2ccaf567d6095..fee0034bf4a0d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java @@ -281,6 +281,7 @@ public void testGroupLookupBase() throws Exception { * If the realm's CA path is monitored for changes and the underlying SSL context is reloaded, then we will get two different outcomes * (one failure, one success) depending on which file content is in place. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83560") public void testSslTrustIsReloaded() throws Exception { assumeFalse( "NPE thrown in BCFIPS JSSE - addressed in https://github.com/bcgit/bc-java/commit/" diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index e96925281c8c9..a673f114bbf91 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -7,12 +7,18 @@ package org.elasticsearch.xpack.security.authc.service; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.get.GetAction; @@ -25,6 +31,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; +import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; import 
org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -87,12 +95,15 @@ import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; +import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyRequest; +import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.Role; @@ -289,6 +300,74 @@ public void testElasticServiceAccount() { ); } + public void testElasticEnterpriseSearchServerAccount() { + final Role role = Role.builder( + ElasticServiceAccounts.ACCOUNTS.get("elastic/enterprise-search-server").roleDescriptor(), + null, + RESTRICTED_INDICES_AUTOMATON + ).build(); + + final Authentication authentication = mock(Authentication.class); + final TransportRequest request = mock(TransportRequest.class); + + // manage + assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(true)); + + // manage_security + assertThat( + role.cluster() + .check(CreateApiKeyAction.NAME, new CreateApiKeyRequest(randomAlphaOfLengthBetween(3, 8), null, null), authentication), + is(true) + ); + assertThat(role.cluster().check(GetApiKeyAction.NAME, GetApiKeyRequest.forOwnedApiKeys(), authentication), is(true)); + assertThat(role.cluster().check(InvalidateApiKeyAction.NAME, InvalidateApiKeyRequest.forOwnedApiKeys(), authentication), is(true)); + + assertThat(role.cluster().check(PutUserAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PutRoleAction.NAME, request, authentication), is(true)); + + // manage_index_templates + assertThat(role.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(DeleteIndexTemplateAction.NAME, request, authentication), is(true)); + + // monitoring + assertThat(role.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(ClusterHealthAction.NAME, request, authentication), is(true)); + + // manage_ilm + assertThat(role.cluster().check(GetLifecycleAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PutLifecycleAction.NAME, request, authentication), is(true)); + + List.of( + ".ent-search-" + randomAlphaOfLengthBetween(1, 20), + ".monitoring-ent-search-" + randomAlphaOfLengthBetween(1, 20), + "metricbeat-ent-search-" + randomAlphaOfLengthBetween(1, 20), + "enterprise-search-" + randomAlphaOfLengthBetween(1, 20), + "logs-app_search.analytics-default", + "logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + 
"logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ).forEach(index -> { + final IndexAbstraction enterpriseSearchIndex = mockIndexAbstraction(index); + assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(DeleteAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(IndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(GetAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(MultiGetAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(SearchAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(RefreshAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(enterpriseSearchIndex), is(false)); + }); + } + private IndexAbstraction mockIndexAbstraction(String name) { IndexAbstraction mock = mock(IndexAbstraction.class); when(mock.getName()).thenReturn(name); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java index f105c63958304..8d54f58273fde 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java @@ -96,7 +96,10 @@ public void stopThreadPool() { } public void testGetServiceAccountPrincipals() { - assertThat(ServiceAccountService.getServiceAccountPrincipals(), containsInAnyOrder("elastic/fleet-server", "elastic/kibana")); + assertThat( + ServiceAccountService.getServiceAccountPrincipals(), + containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/kibana") + ); } public void testTryParseToken() throws IOException, IllegalAccessException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index e7359467da442..7ad3bad40fa46 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -90,6 +90,7 @@ public void testToString() { ApplicationResourcePrivileges.builder().application("my_app").privileges("read", 
"write").resources("*").build() }; final ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("app*"))), new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) }; RoleDescriptor descriptor = new RoleDescriptor( @@ -107,7 +108,7 @@ public void testToString() { descriptor.toString(), is( "Role[name=test, cluster=[all,none]" - + ", global=[{APPLICATION:manage:applications=app01,app02}]" + + ", global=[{APPLICATION:manage:applications=app01,app02},{PROFILE:write:applications=app*}]" + ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" @@ -189,7 +190,11 @@ public void testParse() throws Exception { "privileges": [ "p1", "p2" ], "allow_restricted_indices": true } - ] + ], + "global": { + "profile": { + } + } }"""; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); assertEquals("test", rd.getName()); @@ -239,6 +244,8 @@ public void testParse() throws Exception { "manage": { "applications": [ "kibana", "logstash" ] } + }, + "profile": { } } }"""; @@ -259,7 +266,7 @@ public void testParse() throws Exception { assertThat(rd.getApplicationPrivileges()[1].getApplication(), equalTo("app2")); assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(1)); - final ConfigurableClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; + ConfigurableClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.APPLICATION)); assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.ManageApplicationPrivileges.class)); assertThat( @@ -267,6 +274,45 @@ public void testParse() throws Exception { containsInAnyOrder("kibana", "logstash") ); + q = """ + { + "cluster": [ "manage" ], + "global": { + "profile": { + "write": { + "applications": [ "", "kibana-*" ] + } + }, + "application": { + "manage": { + "applications": [ "apm*", "kibana-1" ] + } + } + } + }"""; + rd = RoleDescriptor.parse("testUpdateProfile", new BytesArray(q), false, XContentType.JSON); + assertThat(rd.getName(), is("testUpdateProfile")); + assertThat(rd.getClusterPrivileges(), arrayContaining("manage")); + assertThat(rd.getIndicesPrivileges(), Matchers.emptyArray()); + assertThat(rd.getRunAs(), Matchers.emptyArray()); + assertThat(rd.getApplicationPrivileges(), Matchers.emptyArray()); + assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(2)); + + conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; + assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.APPLICATION)); + assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.ManageApplicationPrivileges.class)); + assertThat( + ((ConfigurableClusterPrivileges.ManageApplicationPrivileges) conditionalPrivilege).getApplicationNames(), + containsInAnyOrder("apm*", "kibana-1") + ); + conditionalPrivilege = rd.getConditionalClusterPrivileges()[1]; + assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.PROFILE)); + 
assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.WriteProfileDataPrivileges.class)); + assertThat( + ((ConfigurableClusterPrivileges.WriteProfileDataPrivileges) conditionalPrivilege).getApplicationNames(), + containsInAnyOrder("", "kibana-*") + ); + q = """ {"applications": [{"application": "myapp", "resources": ["*"], "privileges": ["login" ]}] }"""; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); @@ -444,6 +490,88 @@ public void testParseIndicesPrivilegesFailsWhenExceptFieldsAreNotSubsetOfGranted assertThat(epe, TestMatchers.throwableWithMessage(containsString("f3"))); } + public void testGlobalPrivilegesOrdering() throws IOException { + final String roleName = randomAlphaOfLengthBetween(3, 30); + final String[] applicationNames = generateRandomStringArray(3, randomIntBetween(0, 3), false, true); + final String[] profileNames = generateRandomStringArray(3, randomIntBetween(0, 3), false, true); + ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Sets.newHashSet(profileNames)), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(applicationNames)) }; + RoleDescriptor role1 = new RoleDescriptor( + roleName, + new String[0], + new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[0], + configurableClusterPrivileges, + new String[0], + Map.of(), + Map.of() + ); + // swap + var temp = configurableClusterPrivileges[0]; + configurableClusterPrivileges[0] = configurableClusterPrivileges[1]; + configurableClusterPrivileges[1] = temp; + RoleDescriptor role2 = new RoleDescriptor( + roleName, + new String[0], + new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[0], + configurableClusterPrivileges, + new String[0], + Map.of(), + Map.of() + ); + assertThat(role2, is(role1)); + StringBuilder applicationNamesString = new StringBuilder(); + for (int i = 0; i < applicationNames.length; i++) { + if (i > 0) { + applicationNamesString.append(", "); + } + applicationNamesString.append("\"" + applicationNames[i] + "\""); + } + StringBuilder profileNamesString = new StringBuilder(); + for (int i = 0; i < profileNames.length; i++) { + if (i > 0) { + profileNamesString.append(", "); + } + profileNamesString.append("\"" + profileNames[i] + "\""); + } + String json = """ + { + "global": { + "profile": { + "write": { + "applications": [ %s ] + } + }, + "application": { + "manage": { + "applications": [ %s ] + } + } + } + }""".formatted(profileNamesString, applicationNamesString); + RoleDescriptor role3 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + assertThat(role3, is(role1)); + json = """ + { + "global": { + "application": { + "manage": { + "applications": [ %s ] + } + }, + "profile": { + "write": { + "applications": [ %s ] + } + } + } + }""".formatted(applicationNamesString, profileNamesString); + RoleDescriptor role4 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + assertThat(role4, is(role1)); + } + public void testIsEmpty() { assertTrue(new RoleDescriptor(randomAlphaOfLengthBetween(1, 10), null, null, null, null, null, null, null).isEmpty()); @@ -483,7 +611,9 @@ public void testIsEmpty() { booleans.get(3) ? 
new ConfigurableClusterPrivilege[0] : new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) }, + randomBoolean() + ? new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) + : new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Collections.singleton("bar")) }, booleans.get(4) ? new String[0] : new String[] { "foo" }, booleans.get(5) ? new HashMap<>() : Collections.singletonMap("foo", "bar"), Collections.singletonMap("foo", "bar") @@ -536,15 +666,32 @@ private RoleDescriptor randomRoleDescriptor() { } applicationPrivileges[i] = builder.build(); } - final ConfigurableClusterPrivilege[] configurableClusterPrivileges; - if (randomBoolean()) { - configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { new ConfigurableClusterPrivileges.ManageApplicationPrivileges( Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) ) }; - } else { - configurableClusterPrivileges = new ConfigurableClusterPrivilege[0]; - } + case 2 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 3 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 4 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + default -> throw new IllegalStateException("Unexpected value"); + }; final Map metadata = new HashMap<>(); while (randomBoolean()) { String key = randomAlphaOfLengthBetween(4, 12); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index a4e9c2dde5ad1..b21b9dd7a7bef 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -137,7 +137,7 @@ public void testGetProfileByUid() { final PlainActionFuture future = new PlainActionFuture<>(); - final Set dataKeys = randomFrom(Set.of("app1"), Set.of("app2"), Set.of("app1", "app2"), Set.of(), null); + final Set dataKeys = randomFrom(Set.of("app1"), Set.of("app2"), Set.of("app1", "app2"), Set.of()); profileService.getProfile(uid, dataKeys, future); final Profile profile = future.actionGet(); @@ -163,7 +163,7 @@ public void testGetProfileByUid() { "foo", List.of("role1", "role2"), "realm_name_1", - null, + "domainA", "foo@example.com", "User Foo", "Curious Foo", diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java index 6e05692085275..58176f0918423 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java @@ -68,8 +68,11 @@ public void testSecurityDisabled() throws Exception { public void testBuildResponse() throws Exception { final RestGetUserPrivilegesAction.RestListener listener = new RestGetUserPrivilegesAction.RestListener(null); final Set cluster = new LinkedHashSet<>(Arrays.asList("monitor", "manage_ml", "manage_watcher")); - final Set conditionalCluster = Collections.singleton( - new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + final Set conditionalCluster = new LinkedHashSet<>( + Arrays.asList( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("app*"))), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + ) ); final Set index = new LinkedHashSet<>( Arrays.asList( @@ -114,6 +117,13 @@ public void testBuildResponse() throws Exception { { "cluster": [ "monitor", "manage_ml", "manage_watcher" ], "global": [ + { + "profile": { + "write": { + "applications": [ "app*" ] + } + } + }, { "application": { "manage": { diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt index 2f2d0a167bcfc..7b5e24c97d65a 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt @@ -7,4 +7,6 @@ role_descriptor2 role_descriptor3 {"cluster":[],"indices":[],"applications":[{"application":"maps","privileges":["{","}","\n","\\","\""],"resources":["raster:*"]},{"application":"maps","privileges":["*:*"],"resources":["noooooo!!\n\n\f\\\\r","{"]}],"run_as":["jack","nich*","//\""],"metadata":{"some meta":42}} role_descriptor4 -{"cluster":["manage_ml","grant_api_key","manage_rollup"],"global":{"application":{"manage":{"applications":["a+b+|b+a+"]}}},"indices":[{"names":["/. ? + * | { } [ ] ( ) \" \\/","*"],"privileges":["read","read_cross_cluster"],"field_security":{"grant":["almost","all*"],"except":["denied*"]}}],"applications":[],"run_as":["//+a+\"[a]/"],"metadata":{"?list":["e1","e2","*"],"some other meta":{"r":"t"}}} \ No newline at end of file +{"cluster":["manage_ml","grant_api_key","manage_rollup"],"global":{"application":{"manage":{"applications":["a+b+|b+a+"]}},"profile":{}},"indices":[{"names":["/. ? 
+ * | { } [ ] ( ) \" \\/","*"],"privileges":["read","read_cross_cluster"],"field_security":{"grant":["almost","all*"],"except":["denied*"]}}],"applications":[],"run_as":["//+a+\"[a]/"],"metadata":{"?list":["e1","e2","*"],"some other meta":{"r":"t"}}} +role_descriptor5 +{"cluster":["all"],"global":{"application":{"manage":{"applications":["\""]}},"profile":{"write":{"applications":["","\""]}}},"indices":[],"applications":[],"run_as":["\"[a]/"]} diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle index 23b825a8970b6..429b29bbc9fdb 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -60,9 +61,8 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' setting 'xpack.security.authc.api_key.enabled', 'true' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) { @@ -80,9 +80,6 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> useCluster baseCluster dependsOn "${baseName}#oldClusterTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().goToNextVersion() if (bwcVersion.before(BuildParams.bwcVersions.minimumWireCompatibleVersion)) { // When doing a full cluster restart of older versions we actually have to upgrade twice. First to 7.last, then to the current version. 
diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 22f4d9cc07eec..51f314e82fda7 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -245,8 +245,8 @@ public void testIgnoreMalformedValues() throws IOException { .startObject() .field( "field", - "POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869037, 18.9401790919517 " - + "-33.9681188869037, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))" + "POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 " + + "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))" ) .endObject() ); @@ -254,7 +254,7 @@ public void testIgnoreMalformedValues() throws IOException { ParsedDocument document = ignoreMapper.parse(sourceToParse); assertThat(document.docs().get(0).getFields("field").length, equalTo(0)); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse)); - assertThat(exception.getCause().getMessage(), containsString("Cannot determine orientation")); + assertThat(exception.getCause().getMessage(), containsString("at least 4 polygon points required")); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java index 15dd9383e5022..ce1cee90745a0 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java @@ -145,6 +145,7 @@ public void testGeoGridSetValuesBoundingBoxes_coversAllLongitudeValues() throws assertThat(numBuckets, equalTo(expected)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/84152") public void testGeoGridSetValuesBoundingBoxes_UnboundedGeoShapeCellValues() throws Exception { for (int i = 0; i < 1000; i++) { int precision = randomIntBetween(0, 3); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java index dbe960087d91d..f72f2d2b61f70 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java @@ -46,10 +46,10 @@ protected GeoHexGridAggregationBuilder createTestInstance() { geoHexGridAggregationBuilder.precision(randomIntBetween(0, H3.MAX_H3_RES)); } if (randomBoolean()) { - geoHexGridAggregationBuilder.size(randomIntBetween(0, 256 * 256)); + geoHexGridAggregationBuilder.size(randomIntBetween(1, 
256 * 256)); } if (randomBoolean()) { - geoHexGridAggregationBuilder.shardSize(randomIntBetween(0, 256 * 256)); + geoHexGridAggregationBuilder.shardSize(randomIntBetween(1, 256 * 256)); } if (randomBoolean()) { geoHexGridAggregationBuilder.setGeoBoundingBox(GeoTestUtils.randomBBox()); diff --git a/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml b/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml index b2593f92290d4..ca4a0873c33c4 100644 --- a/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml +++ b/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml @@ -17,15 +17,15 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": [13.37139831, 47.82930284], "rank": 2.0 }' - index: _index: locations - _id: 2 + _id: "2" - '{"location": [13.3784208402, 47.88832084022], "rank": 0.0 }' - index: _index: locations - _id: 3 + _id: "3" - '{"location": [13.371830148701, 48.2084200148], "rank": 1.2 }' - do: diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 34d91f69d8fd4..16bd33ca31d74 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase { @After public void checkSearchContent() throws IOException { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to fire and forget nature of PIT cleanup assertNoSearchContexts(); } diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java index 3cc5383defed8..5d93943f4223a 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.ql.TestNode; import org.elasticsearch.xpack.ql.TestNodes; import org.elasticsearch.xpack.sql.qa.rest.BaseRestSqlTestCase; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.Before; @@ -111,8 +113,7 @@ private void testNullsOrderWithMissingOrderSupport(RestClient client) throws IOE assertNull(result.get(2)); } - @SuppressWarnings("unchecked") - private List runOrderByNullsLastQuery(RestClient queryClient) throws IOException { + private void indexDocs() throws IOException { Request putIndex = new Request("PUT", "/test"); putIndex.setJsonEntity(""" {"settings":{"index":{"number_of_shards":3}}}"""); @@ -124,17 +125,19 @@ private List runOrderByNullsLastQuery(RestClient queryClient) throws IO for (String doc : Arrays.asList("{\"int\":1,\"kw\":\"foo\"}", "{\"int\":2,\"kw\":\"bar\"}", "{\"kw\":\"bar\"}")) { 
bulk.append("{\"index\":{}}\n").append(doc).append("\n"); } + indexDocs.setJsonEntity(bulk.toString()); client().performRequest(indexDocs); + } + + @SuppressWarnings("unchecked") + private List runOrderByNullsLastQuery(RestClient queryClient) throws IOException { + indexDocs(); Request query = new Request("POST", "_sql"); query.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1 ORDER BY 1 NULLS LAST", bwcVersion)); - Response queryResponse = queryClient.performRequest(query); - - assertEquals(200, queryResponse.getStatusLine().getStatusCode()); + Map result = performRequestAndReadBodyAsJson(queryClient, query); - InputStream content = queryResponse.getEntity().getContent(); - Map result = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); List> rows = (List>) result.get("rows"); return rows.stream().map(row -> (Integer) row.get(0)).collect(Collectors.toList()); } @@ -156,4 +159,42 @@ public static String sqlQueryEntityWithOptionalMode(String query, Version bwcVer return Strings.toString(json); } + public void testCursorFromOldNodeFailsOnNewNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(bwcVersion, oldNodesClient, Version.CURRENT, newNodesClient); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83726") + public void testCursorFromNewNodeFailsOnOldNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(Version.CURRENT, newNodesClient, bwcVersion, oldNodesClient); + } + + private void assertCursorNotCompatibleAcrossVersions(Version version1, RestClient client1, Version version2, RestClient client2) + throws IOException { + indexDocs(); + + Request req = new Request("POST", "_sql"); + // GROUP BY queries always return a cursor + req.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1", bwcVersion)); + Map json = performRequestAndReadBodyAsJson(client1, req); + String cursor = (String) json.get("cursor"); + assertThat(cursor, Matchers.not(Matchers.emptyString())); + + Request scrollReq = new Request("POST", "_sql"); + scrollReq.setJsonEntity("{\"cursor\": \"%s\"}".formatted(cursor)); + ResponseException exception = expectThrows(ResponseException.class, () -> client2.performRequest(scrollReq)); + + assertThat( + exception.getMessage(), + Matchers.containsString("Unsupported cursor version [" + version1 + "], expected [" + version2 + "]") + ); + } + + private Map performRequestAndReadBodyAsJson(RestClient client, Request request) throws IOException { + Response response = client.performRequest(request); + assertEquals(200, response.getStatusLine().getStatusCode()); + try (InputStream content = response.getEntity().getContent()) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + } + } + } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java new file mode 100644 index 0000000000000..fc4a04570ff67 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.sql.qa.multi_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java new file mode 100644 index 0000000000000..87e056baa6751 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.security; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.sql.qa.cli.EmbeddedCli.SecurityConfig; +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase { + @Override + protected Settings restClientSettings() { + return RestSqlIT.securitySettings(); + } + + @Override + protected String getProtocol() { + return RestSqlIT.SSL_ENABLED ? "https" : "http"; + } + + @Override + protected SecurityConfig securityConfig() { + return CliSecurityIT.adminSecurityConfig(); + } +} diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java index b27d6c2138a01..ab7b594e10b21 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java @@ -281,18 +281,27 @@ protected AuditLogAsserter createAuditLogAsserter() { } /** - * Test the hijacking a scroll fails. This test is only implemented for - * REST because it is the only API where it is simple to hijack a scroll. + * Test that hijacking a cursor fails. This test is only implemented for + * REST because it is the only API where it is simple to hijack a cursor. * It should exercise the same code as the other APIs but if we were truly * paranoid we'd hack together something to test the others as well.
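The rewritten test below asserts an authorization failure (403) instead of the old missing-scroll 404: resuming a cursor now goes through the same authorization as the original query. A rough sketch of the asserted behavior, with assumed names (the real check happens in the security layer when the SqlQueryRequest carrying the cursor is processed):

    import java.util.Set;

    final class CursorAuthorizationSketch {
        // Assumed semantics: whoever resumes a cursor needs read privileges of
        // their own; inheriting the creator's access would be a hijack.
        static void authorizeResume(String user, Set<String> grantedActions) {
            if (grantedActions.contains("indices:data/read/sql") == false) {
                // surfaces to the REST client as HTTP 403
                throw new SecurityException("action [indices:data/read/sql] is unauthorized for user [" + user + "]");
            }
        }
    }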
*/ - public void testHijackScrollFails() throws Exception { - createUser("full_access", "rest_minimal"); + public void testHijackCursorFails() throws Exception { + createUser("no_read", "read_nothing"); final String mode = randomMode(); + final String query = randomFrom( + List.of( + "SELECT * FROM test", + "SELECT a FROM test GROUP BY a", + "SELECT MAX(a) FROM test GROUP BY a ORDER BY 1", + "SHOW COLUMNS IN test" + ) + ); + Map adminResponse = RestActions.runSql( null, - new StringEntity(query("SELECT * FROM test").mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), + new StringEntity(query(query).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode, false ); @@ -303,20 +312,18 @@ public void testHijackScrollFails() throws Exception { ResponseException e = expectThrows( ResponseException.class, () -> RestActions.runSql( - "full_access", + "no_read", new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), mode, false ) ); - // TODO return a better error message for bad scrolls - assertThat(e.getMessage(), containsString("No search context found for id")); - assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + + assertThat(e.getMessage(), containsString("is unauthorized for user")); + assertEquals(403, e.getResponse().getStatusLine().getStatusCode()); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") - .expect(true, SQL_ACTION_NAME, "full_access", empty()) - // one scroll access denied per shard - .expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest") + .expect("access_denied", SQL_ACTION_NAME, "no_read", "default_native", empty(), "SqlQueryRequest") .assertLogs(); } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java new file mode 100644 index 0000000000000..afcfca0a01ed2 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.single_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java new file mode 100644 index 0000000000000..76f84541e5bb9 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
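LenientTestCase, which starts here, drives the new setting end to end from the CLI. For orientation, a toy sketch of what field multi-value leniency means (illustrative only; the real logic lives in the server-side hit extractors):

    import java.util.List;

    final class MultiValueLeniencySketch {
        // Illustrative only: with leniency on, a multi-valued field yields its
        // first value (no consistency guarantee); with it off, the query fails.
        static Object extract(String field, List<Object> values, boolean lenient) {
            if (values.size() > 1 && lenient == false) {
                throw new IllegalStateException("Arrays (returned by [" + field + "]) are not supported");
            }
            return values.isEmpty() ? null : values.get(0);
        }
    }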
+ */ +package org.elasticsearch.xpack.sql.qa.cli; + +import org.elasticsearch.test.hamcrest.RegexMatcher; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public abstract class LenientTestCase extends CliIntegrationTestCase { + + public void testLenientCommand() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mtrue[0m", command("lenient = true")); + assertThat(command("SELECT * FROM test"), RegexMatcher.matches("\\s*name\\s*\\|\\s*tags\\s*")); + assertThat(readLine(), containsString("----------")); + assertThat(readLine(), RegexMatcher.matches("\\s*foo\\s*\\|\\s*bar\\s*")); + assertEquals("", readLine()); + } + + public void testDefaultNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } + + public void testExplicitNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mfalse[0m", command("lenient = false")); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } +} diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 900e257e9d56f..2f7bcb71a79f4 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends RemoteClusterAwareSqlRestT @After public void checkSearchContent() throws Exception { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to the fire-and-forget nature of PIT cleanup assertNoSearchContexts(provisioningClient()); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index c5121168116e2..8a236202fbec7 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -49,6 +49,8 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -74,6 +76,7 @@ import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_FORMAT; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.WAIT_FOR_COMPLETION_TIMEOUT_NAME; import static org.hamcrest.Matchers.containsString; +import static
org.hamcrest.Matchers.lessThan; /** * Integration test for the rest sql action. The one that speaks json directly to a @@ -251,6 +254,7 @@ public void testNextPageWithDatetimeAndTimezoneParam() throws IOException { expected.put("columns", singletonList(columnInfo(mode, "tz", "integer", JDBCType.INTEGER, 11))); response = runSql(new StringEntity(sqlRequest, ContentType.APPLICATION_JSON), "", mode); } else { + assertNotNull(cursor); response = runSql( new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, @@ -267,16 +271,12 @@ ); } expected.put("rows", values); + assertTrue(response.containsKey("cursor") == false || response.get("cursor") != null); cursor = (String) response.remove("cursor"); assertResponse(expected, response); - assertNotNull(cursor); } - Map expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, mode) - ); + + assertNull(cursor); deleteIndex("test_date_timezone"); } @@ -1179,7 +1179,7 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri .toString(); String cursor = null; - for (int i = 0; i < 20; i += 2) { + for (int i = 0; i <= 20; i += 2) { Tuple response; if (i == 0) { response = runSqlAsText(StringUtils.EMPTY, new StringEntity(request, ContentType.APPLICATION_JSON), format); @@ -1198,25 +1198,17 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri expected.append("---------------+---------------+---------------\n"); } } - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + cursor = response.v2(); - assertEquals(expected.toString(), response.v1()); - assertNotNull(cursor); + if (i < 20) { + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + assertEquals(expected.toString(), response.v1()); + assertNotNull(cursor); + } else { + assertNull(cursor); + } } - Map expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, Mode.PLAIN.toString()) - ); - - Map response = runSql( - new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), - "/close", - Mode.PLAIN.toString() - ); - assertEquals(true, response.get("succeeded")); assertEquals(0, getNumberOfSearchContexts(provisioningClient(), "test")); } @@ -1433,6 +1425,19 @@ public void testAsyncTextPaginated() throws IOException, InterruptedException { } } + public void testCompressCursor() throws IOException { + String doc = IntStream.range(0, 1000) + .mapToObj(i -> String.format(Locale.ROOT, "\"field%d\": %d", i, i)) + .collect(Collectors.joining(",")); + index("{" + doc + "}"); + + String mode = randomMode(); + Map resp = toMap(runSql(query("SHOW COLUMNS FROM " + indexPattern("test")).fetchSize(1).mode(mode)), mode); + + // without compression, a cursor carrying these 1000 columns is ~35kb; compressed it must stay under 5000 bytes + assertThat(resp.get("cursor").toString().length(), lessThan(5000)); + } + static Map runSql(RequestObjectBuilder builder, String mode) throws IOException { return
toMap(runSql(builder.mode(mode)), mode); } diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 426408950b890..46fc6cd554384 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -54,7 +54,7 @@ tasks.register("runcli") { dependsOn "shadowJar" doLast { List command = ["${BuildParams.runtimeJavaHome}/bin/java"] - if ('true'.equals(providers.systemProperty('debug').orElse('false').forUseAtConfigurationTime().get())) { + if ('true'.equals(providers.systemProperty('debug').orElse('false').get())) { command += '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' } command += ['-jar', shadowJar.archiveFile.get().asFile.absolutePath] diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java index 8ccc079860937..97d5bcc3da927 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.cli.command.CliSession; import org.elasticsearch.xpack.sql.cli.command.FetchSeparatorCliCommand; import org.elasticsearch.xpack.sql.cli.command.FetchSizeCliCommand; +import org.elasticsearch.xpack.sql.cli.command.LenientCliCommand; import org.elasticsearch.xpack.sql.cli.command.PrintLogoCommand; import org.elasticsearch.xpack.sql.cli.command.ServerInfoCliCommand; import org.elasticsearch.xpack.sql.cli.command.ServerQueryCliCommand; @@ -128,6 +129,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL new PrintLogoCommand(), new ClearScreenCliCommand(), new FetchSizeCliCommand(), + new LenientCliCommand(), new FetchSeparatorCliCommand(), new ServerInfoCliCommand(), new ServerQueryCliCommand() @@ -136,7 +138,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL ConnectionBuilder connectionBuilder = new ConnectionBuilder(cliTerminal); ConnectionConfiguration con = connectionBuilder.buildConnection(uri, keystoreLocation, binary); CliSession cliSession = new CliSession(new HttpClient(con)); - cliSession.setDebug(debug); + cliSession.cfg().setDebug(debug); if (checkConnection) { checkConnection(cliSession, cliTerminal, con); } @@ -150,7 +152,7 @@ private void checkConnection(CliSession cliSession, CliTerminal cliTerminal, Con try { cliSession.checkConnection(); } catch (ClientException ex) { - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { cliTerminal.error("Client Exception", ex.getMessage()); cliTerminal.println(); cliTerminal.printStackTrace(ex); diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java index a3ede76da53a7..89f8a71ca9f5c 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java @@ -34,7 +34,7 @@ protected void handleExceptionWhileCommunicatingWithServer(CliTerminal terminal, .param(e.getMessage() == null ? 
e.getClass().getName() : e.getMessage()) .error("]") .ln(); - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { terminal.printStackTrace(e); } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java index 34502aab9db3f..b48c4b84cd0cf 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.client.ClientException; import org.elasticsearch.xpack.sql.client.ClientVersion; import org.elasticsearch.xpack.sql.client.HttpClient; -import org.elasticsearch.xpack.sql.proto.CoreProtocol; import org.elasticsearch.xpack.sql.proto.MainResponse; import org.elasticsearch.xpack.sql.proto.SqlVersion; @@ -20,52 +19,19 @@ */ public class CliSession { private final HttpClient httpClient; - private int fetchSize = CoreProtocol.FETCH_SIZE; - private String fetchSeparator = ""; - private boolean debug; - private boolean binary; + private final CliSessionConfiguration configuration; public CliSession(HttpClient httpClient) { this.httpClient = httpClient; + this.configuration = new CliSessionConfiguration(); } public HttpClient getClient() { return httpClient; } - public void setFetchSize(int fetchSize) { - if (fetchSize <= 0) { - throw new IllegalArgumentException("Must be > 0."); - } - this.fetchSize = fetchSize; - } - - public int getFetchSize() { - return fetchSize; - } - - public void setFetchSeparator(String fetchSeparator) { - this.fetchSeparator = fetchSeparator; - } - - public String getFetchSeparator() { - return fetchSeparator; - } - - public void setDebug(boolean debug) { - this.debug = debug; - } - - public boolean isDebug() { - return debug; - } - - public void setBinary(boolean binary) { - this.binary = binary; - } - - public boolean isBinary() { - return binary; + public CliSessionConfiguration cfg() { + return configuration; } public void checkConnection() throws ClientException { diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java new file mode 100644 index 0000000000000..4507d36946bde --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
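CliSession now holds only the client and delegates every tunable to the CliSessionConfiguration object defined next, so each new knob (such as lenient) has exactly one home. A condensed sketch of the delegation pattern with abbreviated names (the default value is an assumption standing in for CoreProtocol.FETCH_SIZE):

    final class Session {
        private final Config config = new Config();

        Config cfg() {
            return config; // all tunables live behind one accessor
        }

        static final class Config {
            private int fetchSize = 1000; // assumed default, mirroring CoreProtocol.FETCH_SIZE
            private boolean lenient = false;

            void setFetchSize(int fetchSize) {
                if (fetchSize <= 0) {
                    throw new IllegalArgumentException("Must be > 0.");
                }
                this.fetchSize = fetchSize;
            }

            int getFetchSize() {
                return fetchSize;
            }

            void setLenient(boolean lenient) {
                this.lenient = lenient;
            }

            boolean isLenient() {
                return lenient;
            }
        }
    }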
+ */ + +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.proto.CoreProtocol; + +/** + * Configuration for CLI session + */ +public class CliSessionConfiguration { + private int fetchSize; + private String fetchSeparator = ""; + private boolean debug; + private boolean lenient; + + public CliSessionConfiguration() { + this.fetchSize = CoreProtocol.FETCH_SIZE; + this.lenient = CoreProtocol.FIELD_MULTI_VALUE_LENIENCY; + } + + public void setFetchSize(int fetchSize) { + if (fetchSize <= 0) { + throw new IllegalArgumentException("Must be > 0."); + } + this.fetchSize = fetchSize; + } + + public int getFetchSize() { + return fetchSize; + } + + public void setFetchSeparator(String fetchSeparator) { + this.fetchSeparator = fetchSeparator; + } + + public String getFetchSeparator() { + return fetchSeparator; + } + + public void setDebug(boolean debug) { + this.debug = debug; + } + + public boolean isDebug() { + return debug; + } + + public boolean isLenient() { + return lenient; + } + + public void setLenient(boolean lenient) { + this.lenient = lenient; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java index bd07a5b9f04e2..efb6c9c054775 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java @@ -22,8 +22,8 @@ public FetchSeparatorCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { - cliSession.setFetchSeparator(m.group(1)); - terminal.line().text("fetch separator set to \"").em(cliSession.getFetchSeparator()).text("\"").end(); + cliSession.cfg().setFetchSeparator(m.group(1)); + terminal.line().text("fetch separator set to \"").em(cliSession.cfg().getFetchSeparator()).text("\"").end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java index c4b3f1aeeb0ae..f17b3c469aa2d 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java @@ -23,7 +23,7 @@ public FetchSizeCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { try { - cliSession.setFetchSize(Integer.parseInt(m.group(1))); + cliSession.cfg().setFetchSize(Integer.parseInt(m.group(1))); } catch (NumberFormatException e) { terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]").end(); return true; @@ -31,7 +31,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]. 
" + e.getMessage()).end(); return true; } - terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.getFetchSize())).end(); + terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.cfg().getFetchSize())).end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java new file mode 100644 index 0000000000000..fd285a35c96e5 --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.cli.CliTerminal; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * lenient command, enables/disables fields multi-value leniency. + * ie. with lenient = true, in case of array values, return the first value, with no guarantee of consistent results. + * + */ +public class LenientCliCommand extends AbstractCliCommand { + + public LenientCliCommand() { + super(Pattern.compile("lenient *= *(.+)", Pattern.CASE_INSENSITIVE)); + } + + @Override + protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { + cliSession.cfg().setLenient(Boolean.parseBoolean(m.group(1))); + terminal.line().text("lenient set to ").em(Boolean.toString(cliSession.cfg().isLenient())).end(); + return true; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java index 1d929ed7708b4..ae582837b2e9f 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java @@ -26,7 +26,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l SimpleFormatter formatter; String data; try { - response = cliClient.basicQuery(line, cliSession.getFetchSize()); + response = cliClient.basicQuery(line, cliSession.cfg().getFetchSize(), cliSession.cfg().isLenient()); formatter = new SimpleFormatter(response.columns(), response.rows(), CLI); data = formatter.formatWithHeader(response.columns(), response.rows()); while (true) { @@ -36,8 +36,8 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l terminal.flush(); return true; } - if (false == cliSession.getFetchSeparator().equals("")) { - terminal.println(cliSession.getFetchSeparator()); + if (false == cliSession.cfg().getFetchSeparator().equals("")) { + terminal.println(cliSession.cfg().getFetchSeparator()); } response = cliSession.getClient().nextPage(response.cursor()); data = formatter.formatWithoutHeader(response.rows()); diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java index 6c935885662a4..0d809f940c820 100644 --- 
a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java @@ -44,20 +44,20 @@ public void testFetchSeparator() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSeparatorCliCommand cliCommand = new FetchSeparatorCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals("", cliSession.getFetchSeparator()); + assertEquals("", cliSession.cfg().getFetchSeparator()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator = \"foo\"")); - assertEquals("foo", cliSession.getFetchSeparator()); + assertEquals("foo", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"foo\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator=\"bar\"")); - assertEquals("bar", cliSession.getFetchSeparator()); + assertEquals("bar", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"bar\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch separator=\"baz\"")); - assertEquals("baz", cliSession.getFetchSeparator()); + assertEquals("baz", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"baz\"", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -68,21 +68,21 @@ public void testFetchSize() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSizeCliCommand cliCommand = new FetchSizeCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = \"foo\"")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [\"foo\"]", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = 10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("fetch size set to 10", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = -10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [-10]. 
Must be > 0.", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -98,4 +98,21 @@ public void testPrintLogo() throws Exception { verifyNoMoreInteractions(httpClient); } + public void testLenient() { + TestTerminal testTerminal = new TestTerminal(); + HttpClient httpClient = mock(HttpClient.class); + CliSession cliSession = new CliSession(httpClient); + LenientCliCommand cliCommand = new LenientCliCommand(); + assertFalse(cliCommand.handle(testTerminal, cliSession, "lenient")); + assertEquals(false, cliSession.cfg().isLenient()); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = true")); + assertEquals(true, cliSession.cfg().isLenient()); + assertEquals("lenient set to true", testTerminal.toString()); + testTerminal.clear(); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = false")); + assertEquals(false, cliSession.cfg().isLenient()); + assertEquals("lenient set to false", testTerminal.toString()); + testTerminal.clear(); + } + } diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java index e005e9f668ff9..bc1eb75bd9a76 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java @@ -32,11 +32,11 @@ public void testExceptionHandling() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - when(client.basicQuery("blah", 1000)).thenThrow(new SQLException("test exception")); + when(client.basicQuery("blah", 1000, false)).thenThrow(new SQLException("test exception")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "blah")); assertEquals("Bad request [test exception]\n", testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("blah"), eq(1000)); + verify(client, times(1)).basicQuery(eq("blah"), eq(1000), eq(false)); verifyNoMoreInteractions(client); } @@ -44,8 +44,8 @@ public void testOnePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("", true, "foo")); + cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("", true, "foo")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); assertEquals(""" @@ -53,7 +53,7 @@ public void testOnePageQuery() throws Exception { --------------- foo \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verifyNoMoreInteractions(client); } @@ -61,8 +61,8 @@ public void testThreePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("my_cursor1", true, "first")); + 
cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("my_cursor2", false, "second")); when(client.nextPage("my_cursor2")).thenReturn(fakeResponse("", false, "third")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -74,7 +74,7 @@ public void testThreePageQuery() throws Exception { second \s third \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verify(client, times(2)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -83,10 +83,10 @@ public void testTwoPageQueryWithSeparator() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); + cliSession.cfg().setFetchSize(15); // Set a separator - cliSession.setFetchSeparator("-----"); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSeparator("-----"); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("", false, "second")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); @@ -97,7 +97,7 @@ public void testTwoPageQueryWithSeparator() throws Exception { ----- second \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -106,8 +106,8 @@ public void testCursorCleanupOnError() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSize(15); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenThrow(new SQLException("test exception")); when(client.queryClose("my_cursor1", Mode.CLI)).thenReturn(true); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -118,7 +118,7 @@ public void testCursorCleanupOnError() throws Exception { first \s Bad request [test exception] """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verify(client, times(1)).queryClose(eq("my_cursor1"), eq(Mode.CLI)); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 8c14a8008540c..d3784b70a00e2 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -61,6 +61,10 @@ public MainResponse serverInfo() throws SQLException { } public 
SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLException { + return basicQuery(query, fetchSize, CoreProtocol.FIELD_MULTI_VALUE_LENIENCY); + } + + public SqlQueryResponse basicQuery(String query, int fetchSize, boolean fieldMultiValueLeniency) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI SqlQueryRequest sqlRequest = new SqlQueryRequest( @@ -74,7 +78,7 @@ public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLExcept Boolean.FALSE, null, new RequestInfo(Mode.CLI, ClientVersion.CURRENT), - false, + fieldMultiValueLeniency, false, cfg.binaryCommunication() ); diff --git a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java index 6b4648702fb0f..6ff8fc6946856 100644 --- a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java @@ -106,7 +106,7 @@ private void assertBinaryRequestForCLI(XContentType xContentType) throws URISynt prepareMockResponse(); try { - httpClient.basicQuery(query, fetchSize); + httpClient.basicQuery(query, fetchSize, randomBoolean()); } catch (SQLException e) { logger.info("Ignored SQLException", e); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java index 51cbe5f4ca215..1ef55fc6d911e 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.junit.After; @@ -24,7 +23,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.greaterThan; public class SqlCancellationIT extends AbstractSqlBlockingIntegTestCase { @@ -91,18 +90,13 @@ public void testCancellation() throws Exception { disableBlocks(plugins); Exception exception = expectThrows(Exception.class, future::get); - Throwable inner = ExceptionsHelper.unwrap(exception, SearchPhaseExecutionException.class); + assertNotNull(ExceptionsHelper.unwrap(exception, TaskCancelledException.class)); if (cancelDuringSearch) { // Make sure we cancelled inside search - assertNotNull(inner); - assertThat(inner, instanceOf(SearchPhaseExecutionException.class)); - assertThat(inner.getCause(), instanceOf(TaskCancelledException.class)); + assertThat(getNumberOfContexts(plugins), greaterThan(0)); } else { // Make sure we were not cancelled inside search - assertNull(inner); assertThat(getNumberOfContexts(plugins), equalTo(0)); - Throwable cancellationException = ExceptionsHelper.unwrap(exception, TaskCancelledException.class); - 
assertNotNull(cancellationException); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java index a013845dc2fd5..627072da48252 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.common.io; import org.elasticsearch.Version; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -23,22 +25,25 @@ */ public class SqlStreamInput extends NamedWriteableAwareStreamInput { - private final ZoneId zoneId; + public static SqlStreamInput fromString(String base64encoded, NamedWriteableRegistry namedWriteableRegistry, Version version) + throws IOException { + byte[] bytes = Base64.getDecoder().decode(base64encoded); + StreamInput in = StreamInput.wrap(bytes); + Version inVersion = Version.readVersion(in); + if (version.compareTo(inVersion) != 0) { + throw new SqlIllegalArgumentException("Unsupported cursor version [{}], expected [{}]", inVersion, version); + } - public SqlStreamInput(String base64encoded, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { - this(Base64.getDecoder().decode(base64encoded), namedWriteableRegistry, version); + InputStreamStreamInput uncompressingIn = new InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalInputStream(in)); + return new SqlStreamInput(uncompressingIn, namedWriteableRegistry, inVersion); } - public SqlStreamInput(byte[] input, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { - super(StreamInput.wrap(input), namedWriteableRegistry); + private final ZoneId zoneId; + + private SqlStreamInput(StreamInput input, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { + super(input, namedWriteableRegistry); - // version check first - Version ver = Version.readVersion(delegate); - if (version.compareTo(ver) != 0) { - throw new SqlIllegalArgumentException("Unsupported cursor version [{}], expected [{}]", ver, version); - } delegate.setVersion(version); - // configuration settings zoneId = delegate.readZoneId(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java index 4ef5f67a7c4c2..5bfd829ee2db0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java @@ -8,35 +8,47 @@ package org.elasticsearch.xpack.sql.common.io; import org.elasticsearch.Version; +import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.time.ZoneId; import java.util.Base64; +/** + * Output stream for writing SQL cursors. 
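Together with the SqlStreamInput change above, the SqlStreamOutput below defines the new cursor wire format: base64 over an uncompressed version prefix followed by a compressed payload. A self-contained round-trip sketch, using java.util.zip as a stand-in for Elasticsearch's CompressorFactory and a single version byte instead of the real Version serialization:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Base64;
    import java.util.zip.DeflaterOutputStream;
    import java.util.zip.InflaterInputStream;

    final class CursorCodecSketch {
        static String encode(byte version, byte[] payload) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            bytes.write(version); // the version prefix stays uncompressed
            try (DeflaterOutputStream out = new DeflaterOutputStream(bytes)) {
                out.write(payload); // only the payload is compressed
            }
            return Base64.getEncoder().encodeToString(bytes.toByteArray());
        }

        static byte[] decode(byte expectedVersion, String cursor) throws IOException {
            ByteArrayInputStream in = new ByteArrayInputStream(Base64.getDecoder().decode(cursor));
            int version = in.read();
            if (version != expectedVersion) {
                throw new IllegalArgumentException("Unsupported cursor version [" + version + "], expected [" + expectedVersion + "]");
            }
            try (InflaterInputStream inflater = new InflaterInputStream(in)) {
                return inflater.readAllBytes();
            }
        }
    }

Keeping the version prefix outside the compressed region lets a node reject a foreign cursor before it even tries to decompress it.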
The output is compressed if it would otherwise become larger than {@code compressionThreshold} + * bytes (see {@code DEFAULT_COMPRESSION_THRESHOLD}). + * + * The wire format is {@code version compressedPayload}. + */ public class SqlStreamOutput extends OutputStreamStreamOutput { private final ByteArrayOutputStream bytes; - public SqlStreamOutput(Version version, ZoneId zoneId) throws IOException { - this(new ByteArrayOutputStream(), version, zoneId); + public static SqlStreamOutput create(Version version, ZoneId zoneId) throws IOException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + StreamOutput uncompressedOut = new OutputStreamStreamOutput(Base64.getEncoder().wrap(bytes)); + Version.writeVersion(version, uncompressedOut); + OutputStream out = CompressorFactory.COMPRESSOR.threadLocalOutputStream(uncompressedOut); + return new SqlStreamOutput(bytes, out, version, zoneId); } - private SqlStreamOutput(ByteArrayOutputStream bytes, Version version, ZoneId zoneId) throws IOException { - super(Base64.getEncoder().wrap(new OutputStreamStreamOutput(bytes))); + private SqlStreamOutput(ByteArrayOutputStream bytes, OutputStream out, Version version, ZoneId zoneId) throws IOException { + super(out); this.bytes = bytes; - - Version.writeVersion(version, this); - writeZoneId(zoneId); + super.setVersion(version); + this.writeZoneId(zoneId); } /** * Should be called _after_ closing the stream - there are no guarantees otherwise. */ - public String streamAsString() { - // Base64 uses this encoding instead of UTF-8 + public String streamAsString() throws IOException { return bytes.toString(StandardCharsets.ISO_8859_1); } + } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index c8f8ebbf268ab..19a3a8d18bee0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -124,7 +124,7 @@ public void nextPageInternal(SqlConfiguration cfg, Cursor cursor, ActionListener } public void cleanCursor(Cursor cursor, ActionListener listener) { - cursor.clear(client, listener); + cursor.clear(client, writableRegistry, listener); } public Client client() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java index 3c51a495f3568..5fd156a8a8b84 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java @@ -12,9 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -41,6 +38,11 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static
org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse; +import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + /** * Cursor for composite aggregation (GROUP BY). * Stores the query that gets updated/slides across requests. @@ -132,7 +134,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest request = Querier.prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); client.search(request, new ActionListener.Delegating<>(listener) { @Override @@ -169,7 +171,7 @@ static void handle( ) { if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); + logSearchResponse(response, log); } // there are some results if (response.getAggregations().asList().isEmpty() == false) { @@ -244,31 +246,8 @@ private static void updateSourceAfterKey(Map afterKey, SearchSou } } - /** - * Deserializes the search source from a byte array. - */ - private static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { - try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { - return new SearchSourceBuilder(in); - } - } - - /** - * Serializes the search source to a byte array. - */ - private static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { - if (source == null) { - return new byte[0]; - } - - try (BytesStreamOutput out = new BytesStreamOutput()) { - source.writeTo(out); - return BytesReference.toBytes(out.bytes()); - } - } - @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index dbdb23b30d914..298e758a6ff50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -10,12 +10,20 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.Aggregation; @@ -23,6 +31,7 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.filter.Filters; +import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentBuilder; @@ -122,27 +131,64 @@ public void query(List output, QueryContainer query, String index, Ac List> sortingColumns = query.sortingColumns(); listener = sortingColumns.isEmpty() ? listener : new LocalAggregationSorterListener(listener, sortingColumns, query.limit()); - ActionListener l = null; - if (query.isAggsOnly()) { + if (cfg.task() != null && cfg.task().isCancelled()) { + listener.onFailure(new TaskCancelledException("cancelled")); + } else if (query.isAggsOnly()) { + ActionListener l; if (query.aggs().useImplicitGroupBy()) { l = new ImplicitGroupActionListener(listener, client, cfg, output, query, search); } else { l = new CompositeActionListener(listener, client, cfg, output, query, search); } + client.search(search, l); } else { - search.scroll(cfg.pageTimeout()); - l = new ScrollActionListener(listener, client, cfg, output, query); + searchWithPointInTime(search, new SearchHitActionListener(listener, client, cfg, output, query, sourceBuilder)); } + } - if (cfg.task() != null && cfg.task().isCancelled()) { - listener.onFailure(new TaskCancelledException("cancelled")); - return; + private void searchWithPointInTime(SearchRequest search, ActionListener listener) { + final OpenPointInTimeRequest openPitRequest = new OpenPointInTimeRequest(search.indices()).indicesOptions(search.indicesOptions()) + .keepAlive(cfg.pageTimeout()); + + client.execute(OpenPointInTimeAction.INSTANCE, openPitRequest, wrap(openPointInTimeResponse -> { + String pitId = openPointInTimeResponse.getPointInTimeId(); + search.indices(Strings.EMPTY_ARRAY); + search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId)); + ActionListener closePitOnErrorListener = wrap(searchResponse -> { + try { + listener.onResponse(searchResponse); + } catch (Exception e) { + closePointInTimeAfterError(client, pitId, e, listener); + } + }, searchError -> closePointInTimeAfterError(client, pitId, searchError, listener)); + client.search(search, closePitOnErrorListener); + }, listener::onFailure)); + } + + private static void closePointInTimeAfterError(Client client, String pointInTimeId, Exception e, ActionListener listener) { + closePointInTime(client, pointInTimeId, wrap(r -> listener.onFailure(e), closeError -> { + e.addSuppressed(closeError); + listener.onFailure(e); + })); + } + + public static void closePointInTime(Client client, String pointInTimeId, ActionListener listener) { + if (pointInTimeId != null) { + // request should not be made with the parent task assigned because the parent task might already be canceled + client = client instanceof ParentTaskAssigningClient wrapperClient ? 
wrapperClient.unwrap() : client; + + client.execute( + ClosePointInTimeAction.INSTANCE, + new ClosePointInTimeRequest(pointInTimeId), + wrap(clearPointInTimeResponse -> listener.onResponse(clearPointInTimeResponse.isSucceeded()), listener::onFailure) + ); + } else { + listener.onResponse(true); } - client.search(search, l); } - public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeout, boolean includeFrozen, String... indices) { - source.timeout(timeout); + public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeOut, boolean includeFrozen, String... indices) { + source.timeout(timeOut); SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); searchRequest.indices(indices); @@ -181,6 +227,29 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) ); } + /** + * Deserializes the search source from a byte array. + */ + public static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { + try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { + return new SearchSourceBuilder(in); + } + } + + /** + * Serializes the search source to a byte array. + */ + public static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { + if (source == null) { + return new byte[0]; + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + source.writeTo(out); + return BytesReference.toBytes(out.bytes()); + } + } + /** * Listener used for local sorting (typically due to aggregations used inside `ORDER BY`). * @@ -504,24 +573,27 @@ private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor tot } /** - * Dedicated listener for column retrieval/non-grouped queries (scrolls). + * Dedicated listener for column retrieval/non-grouped queries (search hits). */ - static class ScrollActionListener extends BaseActionListener { + static class SearchHitActionListener extends BaseActionListener { private final QueryContainer query; private final BitSet mask; private final boolean multiValueFieldLeniency; + private final SearchSourceBuilder source; - ScrollActionListener( + SearchHitActionListener( ActionListener listener, Client client, SqlConfiguration cfg, List output, - QueryContainer query + QueryContainer query, + SearchSourceBuilder source ) { super(listener, client, cfg, output); this.query = query; this.mask = query.columnMask(output); this.multiValueFieldLeniency = cfg.multiValueFieldLeniency(); + this.source = source; } @Override @@ -534,12 +606,13 @@ protected void handleResponse(SearchResponse response, ActionListener list exts.add(createExtractor(ref.extraction())); } - ScrollCursor.handle( + SearchHitCursor.handle( + client, response, - () -> new SchemaSearchHitRowSet(schema, exts, mask, query.limit(), response), - p -> listener.onResponse(p), - p -> clear(response.getScrollId(), wrap(success -> listener.onResponse(p), listener::onFailure)), - schema + source, + () -> new SchemaSearchHitRowSet(schema, exts, mask, source.size(), query.limit(), response), + listener, + query.shouldIncludeFrozen() ); } @@ -579,7 +652,7 @@ private HitExtractor createExtractor(FieldExtraction ref) { /** * Base listener class providing clean-up and exception handling. - * Handles both scroll queries (scan/scroll) and regular/composite-aggs queries. + * Handles both search hits and composite-aggs queries. 
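searchWithPointInTime and closePointInTimeAfterError above implement an open-search-close protocol where a close failure must not mask the original error. A compact sketch of that control flow, with a hypothetical PitClient standing in for the real Client actions:

    interface PitClient {
        String openPit(String[] indices, long keepAliveMillis);

        String search(String pitId);

        void closePit(String pitId);
    }

    final class PitFlowSketch {
        static String searchWithPit(PitClient client, String[] indices) {
            String pitId = client.openPit(indices, 90_000L); // keep-alive plays the role of cfg.pageTimeout()
            try {
                return client.search(pitId); // the PIT id replaces the index list on the request
            } catch (RuntimeException e) {
                try {
                    client.closePit(pitId); // do not leak the PIT on failure
                } catch (RuntimeException closeError) {
                    e.addSuppressed(closeError); // report the search error, keep the close error attached
                }
                throw e;
            }
        }
    }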
*/ abstract static class BaseActionListener extends ActionListener.Delegating { @@ -595,52 +668,13 @@ abstract static class BaseActionListener extends ActionListener.Delegating cleanup(response, e))); - } - } catch (Exception ex) { - cleanup(response, ex); - } + handleResponse(response, delegate); } protected abstract void handleResponse(SearchResponse response, ActionListener listener); - // clean-up the scroll in case of exception - protected final void cleanup(SearchResponse response, Exception ex) { - if (response != null && response.getScrollId() != null) { - client.prepareClearScroll() - .addScrollId(response.getScrollId()) - // in case of failure, report the initial exception instead of the one resulting from cleaning the scroll - .execute(ActionListener.wrap(r -> delegate.onFailure(ex), e -> { - ex.addSuppressed(e); - delegate.onFailure(ex); - })); - } else { - delegate.onFailure(ex); - } - } - - protected final void clear(String scrollId, ActionListener listener) { - if (scrollId != null) { - client.prepareClearScroll() - .addScrollId(scrollId) - .execute( - ActionListener.wrap( - clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), - listener::onFailure - ) - ); - } else { - listener.onResponse(false); - } - } } @SuppressWarnings("rawtypes") diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java index ea6131c564bf3..67712658529fb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java @@ -15,15 +15,15 @@ import java.util.List; /** - * Initial results from a scroll search. Distinct from the following pages + * Initial results from a search hit search. Distinct from the following pages * because it has a {@link Schema} available. See {@link SearchHitRowSet} * for the next pages. */ class SchemaSearchHitRowSet extends SearchHitRowSet implements SchemaRowSet { private final Schema schema; - SchemaSearchHitRowSet(Schema schema, List exts, BitSet mask, int limitHits, SearchResponse response) { - super(exts, mask, limitHits, response); + SchemaSearchHitRowSet(Schema schema, List exts, BitSet mask, int sizeRequested, int limitHits, SearchResponse response) { + super(exts, mask, sizeRequested, limitHits, response); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java deleted file mode 100644 index e240ca06375d7..0000000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
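SchemaSearchHitRowSet now takes sizeRequested alongside the limit because search_after/PIT paging has no server-side context to exhaust: the client must decide on its own whether a next page exists. A one-method sketch of that decision under assumed semantics (simplified from the row-set code):

    final class PagingSketch {
        // Assumed semantics: a next page exists only if the server filled the
        // requested page size and the original LIMIT still allows more rows.
        static boolean hasNextPage(int sizeRequested, int hitsReturned, int remainingLimit) {
            return hitsReturned >= sizeRequested && remainingLimit != 0;
        }
    }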
- */ -package org.elasticsearch.xpack.sql.execution.search; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; -import org.elasticsearch.xpack.ql.type.Schema; -import org.elasticsearch.xpack.sql.session.Cursor; -import org.elasticsearch.xpack.sql.session.Rows; -import org.elasticsearch.xpack.sql.session.SqlConfiguration; - -import java.io.IOException; -import java.util.BitSet; -import java.util.List; -import java.util.Objects; -import java.util.function.Consumer; -import java.util.function.Supplier; - -import static org.elasticsearch.action.ActionListener.wrap; - -public class ScrollCursor implements Cursor { - - private static final Logger log = LogManager.getLogger(ScrollCursor.class); - - public static final String NAME = "s"; - - private final String scrollId; - private final List extractors; - private final BitSet mask; - private final int limit; - - public ScrollCursor(String scrollId, List extractors, BitSet mask, int limit) { - this.scrollId = scrollId; - this.extractors = extractors; - this.mask = mask; - this.limit = limit; - } - - public ScrollCursor(StreamInput in) throws IOException { - scrollId = in.readString(); - limit = in.readVInt(); - - extractors = in.readNamedWriteableList(HitExtractor.class); - mask = BitSet.valueOf(in.readByteArray()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(scrollId); - out.writeVInt(limit); - - out.writeNamedWriteableList(extractors); - out.writeByteArray(mask.toByteArray()); - } - - @Override - public String getWriteableName() { - return NAME; - } - - String scrollId() { - return scrollId; - } - - BitSet mask() { - return mask; - } - - List extractors() { - return extractors; - } - - int limit() { - return limit; - } - - @Override - public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener listener) { - if (log.isTraceEnabled()) { - log.trace("About to execute scroll query {}", scrollId); - } - - SearchScrollRequest request = new SearchScrollRequest(scrollId).scroll(cfg.pageTimeout()); - client.searchScroll(request, wrap(response -> { - handle( - response, - () -> new SearchHitRowSet(extractors, mask, limit, response), - p -> listener.onResponse(p), - p -> clear(client, wrap(success -> listener.onResponse(p), listener::onFailure)), - Schema.EMPTY - ); - }, listener::onFailure)); - } - - @Override - public void clear(Client client, ActionListener listener) { - cleanCursor( - client, - scrollId, - wrap(clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), listener::onFailure) - ); - } - - static void handle( - SearchResponse response, - Supplier makeRowHit, - Consumer onPage, - Consumer clearScroll, - Schema schema - ) { - if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); - } - SearchHit[] hits = 
response.getHits().getHits(); - // clean-up - if (hits.length > 0) { - SearchHitRowSet rowSet = makeRowHit.get(); - Tuple nextScrollData = rowSet.nextScrollData(); - - if (nextScrollData == null) { - // no more data, let's clean the scroll before continuing - clearScroll.accept(Page.last(rowSet)); - } else { - Cursor next = new ScrollCursor(nextScrollData.v1(), rowSet.extractors(), rowSet.mask(), nextScrollData.v2()); - onPage.accept(new Page(rowSet, next)); - } - } - // no-hits - else { - clearScroll.accept(Page.last(Rows.empty(schema))); - } - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - ScrollCursor other = (ScrollCursor) obj; - return Objects.equals(scrollId, other.scrollId) - && Objects.equals(extractors, other.extractors) - && Objects.equals(limit, other.limit); - } - - @Override - public int hashCode() { - return Objects.hash(scrollId, extractors, limit); - } - - @Override - public String toString() { - return "cursor for scroll [" + scrollId + "]"; - } - - public static void cleanCursor(Client client, String scrollId, ActionListener listener) { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - client.clearScroll(clearScrollRequest, listener); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java new file mode 100644 index 0000000000000..5258492a29af1 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.sql.execution.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; +import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.sql.session.Cursor; +import org.elasticsearch.xpack.sql.session.SqlConfiguration; +import org.elasticsearch.xpack.sql.util.Check; + +import java.io.IOException; +import java.util.Arrays; +import java.util.BitSet; +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.sql.execution.search.Querier.closePointInTime; +import static org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse; +import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + +public class SearchHitCursor implements Cursor { + + private static final Logger log = LogManager.getLogger(SearchHitCursor.class); + + public static final String NAME = "h"; + + private final byte[] nextQuery; + private final List<HitExtractor> extractors; + private final BitSet mask; + private final int limit; + private final boolean includeFrozen; + + /** + * @param nextQuery a serialized {@link SearchSourceBuilder} representing the query to fetch the next page. The query is serialized + * because cursors have to be (de)serialized on the transport layer in {@code TextFormat.PLAIN_TEXT.format}, which does + * not have all the {@link NamedWriteable}s required to deserialize + * {@link SearchSourceBuilder}. As a workaround, the deserialization of {@code nextQuery} is deferred until the query is + * needed.
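Put concretely, each page reuses a single SearchSourceBuilder: before the source is re-serialized into the next cursor, the point-in-time id of the latest response and the sort values of that page's last hit are folded back into it, which is what handle and updateSearchAfter below do. Condensed into one step:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class NextPageSketch {
    // Mutates source so the next search resumes immediately after the last hit of response.
    static void advance(SearchSourceBuilder source, SearchResponse response) {
        source.pointInTimeBuilder(new PointInTimeBuilder(response.pointInTimeId()));
        SearchHit[] hits = response.getHits().getHits();
        if (hits.length > 0) {   // the cursor code only reaches this point when rows remain
            source.searchAfter(hits[hits.length - 1].getSortValues());
        }
    }
}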
+ */ + SearchHitCursor(byte[] nextQuery, List exts, BitSet mask, int remainingLimit, boolean includeFrozen) { + this.nextQuery = nextQuery; + this.extractors = exts; + this.mask = mask; + this.limit = remainingLimit; + this.includeFrozen = includeFrozen; + } + + public SearchHitCursor(StreamInput in) throws IOException { + nextQuery = in.readByteArray(); + limit = in.readVInt(); + + extractors = in.readNamedWriteableList(HitExtractor.class); + mask = BitSet.valueOf(in.readByteArray()); + includeFrozen = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByteArray(nextQuery); + out.writeVInt(limit); + + out.writeNamedWriteableList(extractors); + out.writeByteArray(mask.toByteArray()); + out.writeBoolean(includeFrozen); + } + + @Override + public String getWriteableName() { + return NAME; + } + + byte[] next() { + return nextQuery; + } + + BitSet mask() { + return mask; + } + + List extractors() { + return extractors; + } + + int limit() { + return limit; + } + + boolean includeFrozen() { + return includeFrozen; + } + + @Override + public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener listener) { + SearchSourceBuilder q; + try { + q = deserializeQuery(registry, nextQuery); + } catch (Exception ex) { + listener.onFailure(ex); + return; + } + + SearchSourceBuilder query = q; + if (log.isTraceEnabled()) { + log.trace("About to execute search hit query {}", StringUtils.toString(query)); + } + + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen); + + client.search( + request, + ActionListener.wrap( + (SearchResponse response) -> handle( + client, + response, + request.source(), + makeRowSet(query.size(), response), + listener, + includeFrozen + ), + listener::onFailure + ) + ); + } + + private Supplier makeRowSet(int sizeRequested, SearchResponse response) { + return () -> new SearchHitRowSet(extractors, mask, sizeRequested, limit, response); + } + + static void handle( + Client client, + SearchResponse response, + SearchSourceBuilder source, + Supplier makeRowSet, + ActionListener listener, + boolean includeFrozen + ) { + + if (log.isTraceEnabled()) { + logSearchResponse(response, log); + } + + SearchHit[] hits = response.getHits().getHits(); + + SearchHitRowSet rowSet = makeRowSet.get(); + + if (rowSet.hasRemaining() == false) { + closePointInTime( + client, + response.pointInTimeId(), + ActionListener.wrap(r -> listener.onResponse(Page.last(rowSet)), listener::onFailure) + ); + } else { + source.pointInTimeBuilder(new PointInTimeBuilder(response.pointInTimeId())); + updateSearchAfter(hits, source); + + byte[] nextQuery; + try { + nextQuery = serializeQuery(source); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + SearchHitCursor nextCursor = new SearchHitCursor( + nextQuery, + rowSet.extractors(), + rowSet.mask(), + rowSet.getRemainingLimit(), + includeFrozen + ); + listener.onResponse(new Page(rowSet, nextCursor)); + } + } + + private static void updateSearchAfter(SearchHit[] hits, SearchSourceBuilder source) { + SearchHit lastHit = hits[hits.length - 1]; + source.searchAfter(lastHit.getSortValues()); + } + + @Override + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { + SearchSourceBuilder query; + try { + query = deserializeQuery(registry, nextQuery); + } catch (IOException e) { + listener.onFailure(e); + return; + } + Check.isTrue(query.pointInTimeBuilder() != null, "Expected cursor 
with point-in-time id but got null"); + closePointInTime(client, query.pointInTimeBuilder().getEncodedId(), listener); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(nextQuery), extractors, limit, mask, includeFrozen); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + SearchHitCursor other = (SearchHitCursor) obj; + return Arrays.equals(nextQuery, other.nextQuery) + && Objects.equals(extractors, other.extractors) + && Objects.equals(limit, other.limit) + && Objects.equals(includeFrozen, other.includeFrozen); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index 2b453ff827df5..ba6a9854e4254 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -6,9 +6,7 @@ */ package org.elasticsearch.xpack.sql.execution.search; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; @@ -37,11 +35,11 @@ class SearchHitRowSet extends ResultRowSet { private final int size; private final int[] indexPerLevel; - private final Tuple nextScrollData; + private final int remainingLimit; private int row = 0; - SearchHitRowSet(List exts, BitSet mask, int limit, SearchResponse response) { + SearchHitRowSet(List exts, BitSet mask, int sizeRequested, int limit, SearchResponse response) { super(exts, mask); this.hits = response.getHits().getHits(); @@ -85,30 +83,22 @@ class SearchHitRowSet extends ResultRowSet { indexPerLevel = new int[maxDepth + 1]; this.innerHit = innerHit; - String scrollId = response.getScrollId(); - - if (scrollId == null) { - /* SearchResponse can contain a null scroll when you start a - * scroll but all results fit in the first page. */ - nextScrollData = null; + // compute remaining limit (only if the limit is specified - that is, positive). + int remaining = limit < 0 ? limit : limit - size; + // either the search returned fewer records than requested or the limit is exhausted + if (size < sizeRequested || remaining == 0) { + remainingLimit = 0; } else { - TotalHits totalHits = response.getHits().getTotalHits(); - - // compute remaining limit (only if the limit is specified - that is, positive). - int remainingLimit = limit < 0 ? 
limit : limit - size; - // if the computed limit is zero, or the size is zero it means either there's nothing left or the limit has been reached - if (size == 0 || remainingLimit == 0 - // or the scroll has ended - || totalHits != null && totalHits.value == hits.length) { - nextScrollData = null; - } else { - nextScrollData = new Tuple<>(scrollId, remainingLimit); - } + remainingLimit = remaining; } } - protected boolean isLimitReached() { - return nextScrollData == null; + public boolean hasRemaining() { + return remainingLimit != 0; + } + + public int getRemainingLimit() { + return remainingLimit; } @Override @@ -218,8 +208,4 @@ protected void doReset() { public int size() { return size; } - - Tuple nextScrollData() { - return nextScrollData; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java index b55adc0af34d3..a6ba80e42708b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java @@ -59,8 +59,8 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { - delegate.clear(client, listener); + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { + delegate.clear(client, registry, listener); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java index 5dd5bb203c519..be45132d78314 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java @@ -48,5 +48,5 @@ public static Page last(RowSet rowSet) { /** * Cleans the resources associated with the cursor */ - void clear(Client client, ActionListener listener); + void clear(Client client, NamedWriteableRegistry registry, ActionListener listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java index ccd687d1550ab..b94b60a850dab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput; import org.elasticsearch.xpack.sql.execution.search.CompositeAggCursor; import org.elasticsearch.xpack.sql.execution.search.PivotCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursor; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlBucketExtractors; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlHitExtractors; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -46,7 +46,7 @@ public static List getNamedWriteables() { // cursors entries.add(new NamedWriteableRegistry.Entry(Cursor.class, EmptyCursor.NAME, in -> Cursor.EMPTY)); - entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ScrollCursor.NAME, ScrollCursor::new)); + entries.add(new NamedWriteableRegistry.Entry(Cursor.class, SearchHitCursor.NAME, 
SearchHitCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggCursor.NAME, CompositeAggCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, PivotCursor.NAME, PivotCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, TextFormatterCursor.NAME, TextFormatterCursor::new)); @@ -74,7 +74,7 @@ static String encodeToString(Cursor info, Version version, ZoneId zoneId) { if (info == Cursor.EMPTY) { return StringUtils.EMPTY; } - try (SqlStreamOutput output = new SqlStreamOutput(version, zoneId)) { + try (SqlStreamOutput output = SqlStreamOutput.create(version, zoneId)) { output.writeNamedWriteable(info); output.close(); // return the string only after closing the resource @@ -91,7 +91,7 @@ public static Tuple decodeFromStringWithZone(String base64) { if (base64.isEmpty()) { return new Tuple<>(Cursor.EMPTY, null); } - try (SqlStreamInput in = new SqlStreamInput(base64, WRITEABLE_REGISTRY, VERSION)) { + try (SqlStreamInput in = SqlStreamInput.fromString(base64, WRITEABLE_REGISTRY, VERSION)) { Cursor cursor = in.readNamedWriteable(Cursor.class); return new Tuple<>(cursor, in.zoneId()); } catch (IOException ex) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java index e1ed687e6006c..6e10629c5ffc9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java @@ -38,7 +38,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { // There is nothing to clean listener.onResponse(false); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java index fe72838695b78..f4cd86ac1b8dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java @@ -87,7 +87,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java index 888304f47a216..abcbdccf0e3b4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql; import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireTestCase; @@ -23,14 +22,12 @@ public abstract class AbstractSqlWireSerializingTestCase ex @Override protected T copyInstance(T instance, Version 
version) throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { - ZoneId zoneId = instanceZoneId(instance); - SqlStreamOutput out = new SqlStreamOutput(version, zoneId); - instance.writeTo(out); - out.close(); - try (SqlStreamInput in = new SqlStreamInput(out.streamAsString(), getNamedWriteableRegistry(), version)) { - return instanceReader().read(in); - } + ZoneId zoneId = instanceZoneId(instance); + SqlStreamOutput out = SqlStreamOutput.create(version, zoneId); + instance.writeTo(out); + out.close(); + try (SqlStreamInput in = SqlStreamInput.fromString(out.streamAsString(), getNamedWriteableRegistry(), version)) { + return instanceReader().read(in); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java index 0d359480de949..30e1178a20781 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java @@ -9,6 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.ClosePointInTimeResponse; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -42,11 +47,13 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -158,6 +165,7 @@ public void testCancellationDuringSearch() throws InterruptedException { ClusterService mockClusterService = mockClusterService(nodeId); String[] indices = new String[] { "endgame" }; + String pitId = randomAlphaOfLength(10); // Emulation of field capabilities FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class); @@ -170,12 +178,21 @@ public void testCancellationDuringSearch() throws InterruptedException { return null; }).when(client).fieldCaps(any(), any()); + // Emulation of open pit + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new OpenPointInTimeResponse(pitId)); + return null; + }).when(client).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + // Emulation of search cancellation ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client, SearchAction.INSTANCE).setIndices(indices)); doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") SearchRequest request = (SearchRequest) invocation.getArguments()[1]; + 
assertEquals(pitId, request.pointInTimeBuilder().getEncodedId()); TaskId parentTask = request.getParentTask(); assertNotNull(parentTask); assertEquals(task.getId(), parentTask.getId()); @@ -184,7 +201,18 @@ public void testCancellationDuringSearch() throws InterruptedException { ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(new TaskCancelledException("cancelled")); return null; - }).when(client).execute(any(), searchRequestCaptor.capture(), any()); + }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any()); + + // Emulation of close pit + doAnswer(invocation -> { + ClosePointInTimeRequest request = (ClosePointInTimeRequest) invocation.getArguments()[1]; + assertEquals(pitId, request.getId()); + + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new ClosePointInTimeResponse(true, 1)); + return null; + }).when(client).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); @@ -204,10 +232,12 @@ public void onFailure(Exception e) { countDownLatch.countDown(); } }, "", mock(TransportService.class), mockClusterService); - countDownLatch.await(); + assertTrue(countDownLatch.await(5, TimeUnit.SECONDS)); // Final verification to ensure no more interaction verify(client).fieldCaps(any(), any()); - verify(client).execute(any(), any(), any()); + verify(client, times(1)).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(SearchAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); verify(client, times(1)).settings(); verify(client, times(1)).threadPool(); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java new file mode 100644 index 0000000000000..d239e07e8b06a --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
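The SqlStreamTests added below pin down the cursor wire format that the SqlStreamOutput.create and SqlStreamInput.fromString factories produce and consume: a plain version header (which old nodes can read and reject cleanly) followed by a compressed payload, all base64-encoded. Typical usage, with the registry contents and time zone chosen purely for illustration:

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xpack.sql.common.io.SqlStreamInput;
import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput;

import java.io.IOException;
import java.time.ZoneOffset;
import java.util.List;

class CursorStringSketch {
    static void roundTrip() throws IOException {
        SqlStreamOutput out = SqlStreamOutput.create(Version.CURRENT, ZoneOffset.UTC);
        out.writeString("payload");
        out.close();                                // close first; the string is only valid afterwards
        String cursor = out.streamAsString();       // base64: plain version header + compressed body

        SqlStreamInput in = SqlStreamInput.fromString(cursor, new NamedWriteableRegistry(List.of()), Version.CURRENT);
        assert "payload".equals(in.readString());
    }
}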
+ */ + +package org.elasticsearch.xpack.sql.common.io; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.Base64; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.lessThan; + +public class SqlStreamTests extends ESTestCase { + + public void testWriteAndRead() throws IOException { + BytesRef payload = new BytesRef(randomByteArrayOfLength(randomIntBetween(10, 1000))); + + SqlStreamOutput out = SqlStreamOutput.create(Version.CURRENT, randomZone()); + out.writeBytesRef(payload); + out.close(); + String encoded = out.streamAsString(); + + SqlStreamInput in = SqlStreamInput.fromString(encoded, new NamedWriteableRegistry(List.of()), Version.CURRENT); + BytesRef read = in.readBytesRef(); + + assertArrayEquals(payload.bytes, read.bytes); + } + + public void testPayloadIsCompressed() throws IOException { + SqlStreamOutput out = SqlStreamOutput.create(Version.CURRENT, randomZone()); + byte[] payload = new byte[1000]; + Arrays.fill(payload, (byte) 0); + out.write(payload); + out.close(); + + String result = out.streamAsString(); + assertThat(result.length(), lessThan(1000)); + } + + public void testOldCursorProducesVersionMismatchError() { + SqlIllegalArgumentException ex = expectThrows( + SqlIllegalArgumentException.class, + () -> SqlStreamInput.fromString( + // some cursor produced by ES 7.15.1 + "97S0AwFaAWMBCHRlc3RfZW1whgEBAQljb21wb3NpdGUHZ3JvdXBieQEDbWF4CDJkMTBjNGJhAAD/AQls" + + "YW5ndWFnZXMAAAD/AAD/AQAIYmRlZjg4ZTUBBmdlbmRlcgAAAQAAAQEKAQhiZGVmODhlNf8AAgEAAAAA" + + "AP////8PAAAAAAAAAAAAAAAAAVoDAAICAAAAAAAAAAAKAP////8PAgFtCDJkMTBjNGJhBXZhbHVlAAEE" + + "QllURQFrCGJkZWY4OGU1AAABAwA=", + new NamedWriteableRegistry(List.of()), + Version.V_8_2_0 + ) + ); + + assertThat(ex.getMessage(), containsString("Unsupported cursor version [7.15.1], expected [8.2.0]")); + } + + public void testVersionCanBeReadByOldNodes() throws IOException { + Version version = randomFrom(Version.V_7_0_0, Version.V_7_2_1, Version.V_8_1_0); + SqlStreamOutput out = SqlStreamOutput.create(version, randomZone()); + out.writeString("payload"); + out.close(); + String encoded = out.streamAsString(); + + byte[] bytes = Base64.getDecoder().decode(encoded); + InputStreamStreamInput in = new InputStreamStreamInput(new ByteArrayInputStream(bytes)); + + assertEquals(version, Version.readVersion(in)); + } + +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java index 4815a7feec358..8cff328daa88a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java @@ -30,7 +30,7 @@ public static CompositeAggCursor randomCompositeCursor() { } return new CompositeAggCursor( - new byte[randomInt(256)], + new byte[randomInt(1024)], extractors, randomBitSet(extractorsSize), randomIntBetween(10, 1024), diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java index 44016669595cf..bfe2394b8d822 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java @@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests.randomHitExtractor; +import static org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests.randomHitExtractor; public class QuerierTests extends ESTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java similarity index 72% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java index f4e19175134fe..e7146e1664c88 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java @@ -21,18 +21,19 @@ import java.util.List; import java.util.function.Supplier; -public class ScrollCursorTests extends AbstractSqlWireSerializingTestCase { - public static ScrollCursor randomScrollCursor() { +public class SearchHitCursorTests extends AbstractSqlWireSerializingTestCase { + public static SearchHitCursor randomSearchHitCursor() { int extractorsSize = between(1, 20); List extractors = new ArrayList<>(extractorsSize); for (int i = 0; i < extractorsSize; i++) { extractors.add(randomHitExtractor(0)); } - return new ScrollCursor( - randomAlphaOfLength(5), + return new SearchHitCursor( + new byte[randomInt(256)], extractors, CompositeAggregationCursorTests.randomBitSet(extractorsSize), - randomIntBetween(10, 1024) + randomIntBetween(10, 1024), + randomBoolean() ); } @@ -46,12 +47,13 @@ static HitExtractor randomHitExtractor(int depth) { } @Override - protected ScrollCursor mutateInstance(ScrollCursor instance) throws IOException { - return new ScrollCursor( - instance.scrollId(), + protected SearchHitCursor mutateInstance(SearchHitCursor instance) throws IOException { + return new SearchHitCursor( + instance.next(), instance.extractors(), randomValueOtherThan(instance.mask(), () -> CompositeAggregationCursorTests.randomBitSet(instance.extractors().size())), - randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)) + randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)), + instance.includeFrozen() == false ); } @@ -61,22 +63,22 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { } @Override - protected ScrollCursor createTestInstance() { - return randomScrollCursor(); + protected SearchHitCursor createTestInstance() { + return randomSearchHitCursor(); } @Override - protected Reader instanceReader() { - return ScrollCursor::new; + protected Reader instanceReader() { + return SearchHitCursor::new; } @Override - protected ScrollCursor copyInstance(ScrollCursor instance, Version version) throws IOException { + protected SearchHitCursor 
copyInstance(SearchHitCursor instance, Version version) throws IOException { /* Randomly choose between internal protocol round trip and String based * round trips used to toXContent. */ if (randomBoolean()) { return super.copyInstance(instance, version); } - return (ScrollCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); + return (SearchHitCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java index ea5a55b92e8fb..08e0f6fca8912 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.logging.LoggerMessageFormat; @@ -16,26 +14,21 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.action.BasicFormatter; import org.elasticsearch.xpack.sql.action.SqlQueryResponse; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests; import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.session.CursorsTestUtil; -import org.mockito.ArgumentCaptor; import java.util.ArrayList; -import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.function.Supplier; import static org.elasticsearch.action.support.PlainActionFuture.newFuture; import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; public class CursorTests extends ESTestCase { @@ -44,26 +37,11 @@ public void testEmptyCursorClearCursor() { Client clientMock = mock(Client.class); Cursor cursor = Cursor.EMPTY; PlainActionFuture future = newFuture(); - cursor.clear(clientMock, future); + cursor.clear(clientMock, writableRegistry(), future); assertFalse(future.actionGet()); verifyNoMoreInteractions(clientMock); } - @SuppressWarnings("unchecked") - public void testScrollCursorClearCursor() { - Client clientMock = mock(Client.class); - ActionListener listenerMock = mock(ActionListener.class); - String cursorString = randomAlphaOfLength(10); - Cursor cursor = new ScrollCursor(cursorString, Collections.emptyList(), new BitSet(0), randomInt()); - - cursor.clear(clientMock, listenerMock); - - ArgumentCaptor request = ArgumentCaptor.forClass(ClearScrollRequest.class); - verify(clientMock).clearScroll(request.capture(), any(ActionListener.class)); - assertEquals(Collections.singletonList(cursorString), request.getValue().getScrollIds()); - verifyNoMoreInteractions(listenerMock); - } - private static SqlQueryResponse 
createRandomSqlResponse() { int columnCount = between(1, 10); @@ -79,25 +57,25 @@ private static SqlQueryResponse createRandomSqlResponse() { @SuppressWarnings("unchecked") static Cursor randomNonEmptyCursor() { - Supplier cursorSupplier = randomFrom(() -> ScrollCursorTests.randomScrollCursor(), () -> { + Supplier cursorSupplier = randomFrom(SearchHitCursorTests::randomSearchHitCursor, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.CLI) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.TEXT) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }); return cursorSupplier.get(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java index b4d4c0201c182..ba08e25c381d8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java @@ -19,7 +19,7 @@ public class ListCursorTests extends AbstractSqlWireSerializingTestCase { public static ListCursor randomPagingListCursor() { - int size = between(1, 20); + int size = between(1, 100); int depth = between(1, 20); List> values = new ArrayList<>(size); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml index aef14870d7cc5..5e6307774d91d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml @@ -18,7 +18,7 @@ - do: index: index: aggregate_metric_test - id: 1 + id: "1" body: metric: min: 18.2 @@ -75,7 +75,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -85,7 +85,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -151,7 +151,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -161,7 +161,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -228,7 +228,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -238,7 +238,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -248,7 +248,7 @@ - do: index: index: test - id: 3 + id: "3" body: metric: min: 150 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml index 7a6c2a0d53b0d..be971c00005d9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml @@ -31,19 +31,19 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: {} - do: index: index: test1 - id: 2 + id: "2" body: { "foo": "bar" } - do: index: index: test2 - id: 1 + id: "1" body: {} - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml index 4357b4ecf171a..09dd1c1bdb6e5 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml @@ -13,7 +13,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: {} - do: @@ -26,7 +26,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: {} - do: @@ -67,7 +67,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: foo: bar diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml index eacbb90bcd058..4d464cfcb4548 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml @@ -280,14 +280,14 @@ - do: index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { keys: [1,2,3], '@timestamp': '2020-12-12' } - do: index: index: simple-data-stream1 - id: 2 + id: "2" op_type: create body: { keys: [4,5,6], '@timestamp': '2020-12-12' } @@ -526,7 +526,7 @@ body: - create: _index: simple-data-stream1 - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml index 6c691f4f60e14..29e67b3f53479 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml @@ -18,13 +18,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { keys: [1,2,3] } - do: index: index: test_1 - id: 2 + id: "2" body: { keys: [4,5,6] } - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml new file mode 100644 index 0000000000000..84f5d13885a7d --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml @@ -0,0 +1,110 @@ +# The three tests call the API in different ways but all get the +# same result as they run against a single node cluster +--- +"ML memory all nodes": + - skip: + features: [arbitrary_key] + - do: + ml.get_memory_stats: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + ml.get_memory_stats: {} + + - match: { _nodes.total: 1 } + - match: { _nodes.successful: 1 } + - match: { _nodes.failed: 0 } + - is_true: cluster_name + - is_true: nodes.$node_id.name + - is_true: nodes.$node_id.ephemeral_id + - is_true: nodes.$node_id.transport_address + - is_true: nodes.$node_id.attributes + - is_true: nodes.$node_id.roles + - gt: { nodes.$node_id.mem.total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 
0 } + - gt: { nodes.$node_id.mem.ml.max_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 } + # This next one has to be >= 0 rather than 0 because the cache is invalidated + # lazily after models are no longer in use, and previous tests could have + # caused a model to be cached + - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 } + +--- +"ML memory for ML nodes": + - skip: + features: [arbitrary_key] + - do: + ml.get_memory_stats: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + ml.get_memory_stats: + node_id: "ml:true" + master_timeout: "1m" + + - match: { _nodes.total: 1 } + - match: { _nodes.successful: 1 } + - match: { _nodes.failed: 0 } + - is_true: cluster_name + - is_true: nodes.$node_id.name + - is_true: nodes.$node_id.ephemeral_id + - is_true: nodes.$node_id.transport_address + - is_true: nodes.$node_id.attributes + - is_true: nodes.$node_id.roles + - gt: { nodes.$node_id.mem.total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.ml.max_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 } + # This next one has to be >= 0 rather than 0 because the cache is invalidated + # lazily after models are no longer in use, and previous tests could have + # caused a model to be cached + - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 } + +--- +"ML memory for specific node": + - skip: + features: [arbitrary_key] + - do: + ml.get_memory_stats: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + ml.get_memory_stats: + node_id: $node_id + timeout: "29s" + + - match: { _nodes.total: 1 } + - match: { _nodes.successful: 1 } + - match: { _nodes.failed: 0 } + - is_true: cluster_name + - is_true: nodes.$node_id.name + - is_true: nodes.$node_id.ephemeral_id + - is_true: nodes.$node_id.transport_address + - is_true: nodes.$node_id.attributes + - is_true: nodes.$node_id.roles + - gt: { nodes.$node_id.mem.total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.ml.max_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 } + # This next one has to be >= 0 rather than 0 because the cache is invalidated + # lazily after models are no longer in use, and previous tests could have + # caused a model to be cached + - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml index 
bd8b567c2a514..4cff060d25356 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml @@ -17,7 +17,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -29,7 +29,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -41,7 +41,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -53,7 +53,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", @@ -774,7 +774,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -786,7 +786,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -798,7 +798,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -810,7 +810,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml index 53afc52043d19..21badf967b7aa 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml @@ -21,7 +21,7 @@ teardown: - do: delete: index: foo - id: 1 + id: "1" ignore: 404 --- @@ -55,7 +55,7 @@ teardown: - do: index: index: foo - id: 1 + id: "1" body: { foo: bar } - do: @@ -63,7 +63,7 @@ teardown: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" get: index: foo - id: 1 + id: "1" - match: { _index: foo } - match: { _id: "1"} - match: { _source: { foo: bar }} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml index 4c280d5313c76..1f56a8c373968 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml index 5732b19f93665..dab154917b17b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml @@ -98,7 +98,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: only_index body: > { @@ -108,7 +108,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: everything body: > { @@ -162,7 +162,7 @@ teardown: headers: { Authorization: "Basic 
dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 7 + id: "7" index: only_read body: > { @@ -174,7 +174,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 8 + id: "8" index: only_delete body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml index 32e4694311f88..5c19aa3bbfcad 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml @@ -140,14 +140,14 @@ teardown: delete: refresh: true index: only_delete - id: 3 + id: "3" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user delete: refresh: true index: everything - id: 8 + id: "8" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user @@ -172,10 +172,10 @@ teardown: body: # The rest test won't send streaming content unless it has multiple bodies, so we send the same delete twice - delete: _index: everything - _id: 10 + _id: "10" - delete: _index: everything - _id: 10 + _id: "10" - do: # superuser search: @@ -199,7 +199,7 @@ teardown: delete: refresh: true index: only_read - id: 1 + id: "1" - do: catch: forbidden @@ -207,7 +207,7 @@ teardown: delete: refresh: true index: only_index - id: 2 + id: "2" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml index 1f490ec08dac1..e1901ced2817e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml @@ -127,7 +127,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: can_write_1 body: > { @@ -137,7 +137,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: can_write_2 body: > { @@ -195,7 +195,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 7 + id: "7" index: can_read_1 body: > { @@ -207,7 +207,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 8 + id: "8" index: can_read_2 body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml index 2651519e5f785..db1dd72553b3f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml @@ -46,7 +46,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 1 + id: "1" index: "" body: > { @@ -81,7 +81,7 @@ teardown: catch: forbidden headers: { Authorization: "Basic 
dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 4 + id: "4" index: "" body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml index 63ab3527c58b5..09d0d416e54da 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml @@ -135,19 +135,19 @@ teardown: - do: index: index: index_to_monitor - id: 0 + id: "0" body: { foo: bar } - do: index: index: index_to_monitor - id: 1 + id: "1" body: { foo: bar } - do: index: index: index_to_monitor - id: 2 + id: "2" body: { foo: bar } - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml index 4e9367c238ae7..6e22bb4b8b43e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml @@ -126,7 +126,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 1 + id: "1" index: only_read - match: { _index: only_read } @@ -136,7 +136,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 4 + id: "4" index: read_write - match: { _index: read_write } - match: { _id: "4" } @@ -145,7 +145,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 5 + id: "5" index: everything - match: { _index: everything } - match: { _id: "5" } @@ -206,14 +206,14 @@ teardown: catch: forbidden headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 2 + id: "2" index: only_index - do: catch: forbidden headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 3 + id: "3" index: only_delete - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml index 8176a276b0301..7b50942478751 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml @@ -70,7 +70,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: write_alias body: > { @@ -80,7 +80,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: write_alias body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml index fd9f6d1d46050..73979883291c3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml @@ -72,7 +72,7 @@ teardown: - 
do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: write_manage_alias body: > { @@ -82,7 +82,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: write_manage_alias body: > { @@ -120,7 +120,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 3 + id: "3" index: write_manage_alias body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml index e1e6f86ef51aa..4757247a946d3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml @@ -118,7 +118,7 @@ teardown: - do: # superuser index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -158,7 +158,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx0name - id: 1 + id: "1" if_seq_no: $seqno if_primary_term: $primary_term op_type: index @@ -257,7 +257,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx0name - id: 1 + id: "1" op_type: index if_seq_no: $seqno if_primary_term: $primary_term @@ -267,7 +267,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 2 + id: "2" op_type: create index: write-data-stream1 body: { foo: bar, bar: baz, "@timestamp": "2000-12-12" } @@ -287,7 +287,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx1name - id: 2 + id: "2" op_type: index if_seq_no: $seqno if_primary_term: $primary_term @@ -526,7 +526,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: simple-allows-auto-configure - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -536,7 +536,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml index ab818c3f12e64..85c455c41277e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml @@ -67,7 +67,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: index-auto-configure - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -77,6 +77,6 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: index-limited - id: 1 + id: "1" op_type: create body: { "@timestamp": "2020-12-12" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml 
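# Note on the pervasive `id: 1` -> `id: "1"` changes in the test files above
# and below: the document id is a string parameter of the index API, but an
# unquoted YAML scalar such as `1` is parsed as an integer, so the ids are
# quoted to keep the parameter a string. A minimal sketch, using a
# hypothetical index name:
#
#   - do:
#       index:
#         index: example-index   # hypothetical
#         id: "1"                # quoted so the id stays a YAML string
#         body: { foo: bar }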
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml index 7524379427f91..5c6d2d0c78275 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml @@ -20,18 +20,27 @@ teardown: name: api-token-kibana ignore: 404 + - do: + security.delete_service_token: + namespace: elastic + service: enterprise-search-server + name: api-token-enterprise-search-server + ignore: 404 + --- "Test get service accounts": - do: security.get_service_accounts: {} - - length: { '': 2 } + - length: { '': 3 } + - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/kibana" - do: security.get_service_accounts: namespace: elastic - - length: { '': 2 } + - length: { '': 3 } + - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/kibana" @@ -66,6 +75,16 @@ teardown: - match: { "token.name": "api-token-kibana" } - set: { "token.value": service_token_kibana } + - do: + security.create_service_token: + namespace: elastic + service: enterprise-search-server + name: api-token-enterprise-search-server + + - is_true: created + - match: { "token.name": "api-token-enterprise-search-server" } + - set: { "token.value": service_token_enterprise_search_server } + - do: headers: Authorization: Bearer ${service_token_fleet} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml index 7442c74a9eae6..7644764bc518a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml @@ -97,7 +97,7 @@ teardown: Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" index: index: shared_logs - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -108,7 +108,7 @@ teardown: Authorization: "Basic am9objp4LXBhY2stdGVzdC1wYXNzd29yZA==" index: index: shared_logs - id: 2 + id: "2" pipeline: "my_pipeline" body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml index c30192d7f5d7c..7576dfe7ce250 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml @@ -48,7 +48,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -69,7 +69,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 2 + id: "2" pipeline: "my_pipeline" body: > { @@ -91,7 +91,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 3 + id: "3" pipeline: "my_pipeline" body: > { @@ -105,7 +105,7 @@ teardown: - do: get: index: index - id: 1 + id: "1" - match: { _source.user.api_key.name: "with-metadata" } - match: { _source.user.api_key.id: $id_with_metadata } - match: { _source.user.api_key.metadata: { "string": "hello", "number": 42, "complex": {"foo": "bar", "values": [1, 3, 5]} } } @@ -113,7 +113,7 @@ teardown: - do: get: index: index - id: 2 + id: "2" - match: { 
_source.user.api_key.name: "no-metadata" } - match: { _source.user.api_key.id: $id_no_metadata } - is_false: _source.user.api_key.metadata @@ -121,7 +121,7 @@ teardown: - do: get: index: index - id: 3 + id: "3" - match: { _source.user.api_key.name: "empty-metadata" } - match: { _source.user.api_key.id: $id_empty_metadata } - is_false: _source.user.api_key.metadata diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml index 19484e4dd7efe..1d370082c8e48 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml @@ -30,7 +30,7 @@ setup: - do: index: index: test_index - id: 1 + id: "1" body: { foo: bar } - do: indices.flush: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml index a792ee33ff371..10ab8bed65d34 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" - '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml index 2b7471bdf858e..7696e8ab6f16d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" - '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml index 4f41f6b75c481..809d8697f505c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" - '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml index f62eb6f10bec5..1e146abef8582 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml @@ -17,15 +17,15 @@ body: - index: _index: races - _id: 1 + _id: "1" - '{"position": "POINT(4.912350 52.374081)", "race_id": "Amsterdam", "timestamp": 4}' - index: _index: races - _id: 2 + _id: "2" - '{"position": "POINT(4.901618 52.369219)", "race_id": "Amsterdam", "timestamp": 3}' - index: _index: races - _id: 3 + _id: "3" - '{"position": "POINT(4.914722 52.371667)", "race_id": "Amsterdam", "timestamp": 10}' - do: @@ -84,27 +84,27 @@ body: - index: _index: test1 - _id: 1 + _id: "1" - '{ "date" : "2020-01-01T01:00:00.0Z", "entity" : "e1", "location" : { "lat" : 50.3, "lon" : 0.13 }}' - index: _index: test1 - _id: 2 + _id: "2" - '{ "date" : "2020-01-01T01:00:01.0Z", "entity" : "e1", "location" : { "lat" : 50.4, "lon" : 0.13 } }' - index: _index: test1 - _id: 3 + _id: "3" - '{ "date" : "2020-01-01T01:00:03.0Z", "entity" : "e1", "location" : { "lat" : 50.5, "lon" : 0.13 }}' - index: _index: test2 - _id: 1 + _id: "1" - '{ "date" : "2020-01-02T02:00:01.0Z", "entity" : "e2", "location" : { "lat" : 51.3, "lon" : 0.13 }}' - index: _index: test2 - _id: 2 + _id: "2" - '{ "date" : "2020-01-02T02:00:02.0Z", "entity" : "e2", "location" : { "lat" : 51.4, "lon" : 0.13 }}' - index: _index: test2 - _id: 3 + _id: "3" - '{ "date" : "2020-01-02T02:00:03.0Z", "entity" : "e2", "location" : { "lat" : 51.5, "lon" : 0.13 }}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml index 3831268c4f783..422afd9522aef 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml @@ -13,7 +13,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: 
geo_shape: "POLYGON((24.04725 59.942,24.04825 59.94125,24.04875 59.94125,24.04875 59.94175,24.048 59.9425,24.0475 59.94275,24.0465 59.94225,24.046 59.94225,24.04575 59.9425,24.04525 59.94225,24.04725 59.942))" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml index af45542eefb11..dc09cf91be72b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml @@ -6,17 +6,17 @@ setup: body: - index: _index: test - _id: 1 + _id: "1" - str: test1 int: 1 - index: _index: test - _id: 2 + _id: "2" - str: test2 int: 2 - index: _index: test - _id: 3 + _id: "3" - str: test3 int: 3 @@ -38,6 +38,9 @@ setup: --- "Paging through results": + - skip: + version: "8.1.99 - " + reason: PIT cursors introduced in 8.2 no longer return empty last page - do: sql.query: format: json @@ -73,6 +76,41 @@ setup: - is_false: cursor - length: { rows: 0 } +--- +"Paging through results with PIT cursor": + - skip: + version: " - 8.1.99" + reason: PIT cursors introduced in 8.2 no longer return empty last page + - do: + sql.query: + format: json + body: + query: "SELECT * FROM test ORDER BY int asc" + fetch_size: 2 + - match: { columns.0.name: int } + - match: { columns.1.name: str } + - match: { rows.0.0: 1 } + - match: { rows.0.1: test1 } + - match: { rows.1.0: 2 } + - match: { rows.1.1: test2 } + - is_true: cursor + - set: { cursor: cursor } + + - do: + sql.query: + format: json + body: + cursor: "$cursor" + - match: { rows.0.0: 3 } + - match: { rows.0.1: test3 } + - is_false: columns + - is_false: cursor + + - do: + indices.stats: { index: 'test' } + + - match: { indices.test.total.search.open_contexts: 0 } + --- "Getting textual representation": - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml index 6439b6f1be92b..21ba2e0cf2883 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml @@ -6,7 +6,7 @@ body: - index: _index: test - _id: 1 + _id: "1" - str: test1 int: 1 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml index d29701277d12b..a0d2caa17a946 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml @@ -186,24 +186,24 @@ setup: - do: index: index: test_k - id: 1 + id: "1" body: { foo: "bar_k", foo_non_indexed: "bar_k", "timestamp":"2021-01-01T01:01:01.000Z" } - do: index: index: test_ck - id: 2 + id: "2" body: { other: "foo", "timestamp":"2020-01-01T01:01:01.000Z" } - do: index: index: test_f - id: 3 + id: "3" body: { foo: { bar: "bar_f" }, "timestamp":"2019-01-01T01:01:01.000Z" } - do: index: index: test_security - id: 4 + id: "4" body: { foo: "bar_dls", foo_non_indexed: "bar_dls"} - do: #superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml index 06ddbb21d89f3..08b6bcd978f1e 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -16,7 +16,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -28,7 +28,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -40,7 +40,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -52,7 +52,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml index ae3554ce9e634..640f5af7b58c7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml @@ -57,7 +57,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -69,7 +69,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -81,7 +81,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -93,7 +93,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml index f6d727b69099d..c9c6346d3fd38 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml @@ -39,6 +39,8 @@ teardown: - match: { "user.username" : "joe" } - match: { "user.roles" : [ "superuser" ] } - match: { "user.full_name" : "Bazooka Joe" } + - match: { "user.realm_name" : "default_native" } + - is_false: "user.realm_domain" - is_true: _doc - set: { uid: profile_uid } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml index d95f249ae0a26..4418a7a602eae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml @@ -21,7 +21,7 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [230.0, 300.33, -34.8988, 15.555, -200.0] indexed_vector: [230.0, 300.33, -34.8988, 15.555, -200.0] @@ -29,7 +29,7 @@ setup: - do: index: index: test-index - id: 2 + id: "2" body: vector: [-0.5, 100.0, -13, 14.8, -156.0] indexed_vector: [-0.5, 100.0, -13, 14.8, -156.0] @@ -37,7 +37,7 @@ setup: - do: index: index: test-index - id: 3 + id: "3" body: vector: [0.5, 111.3, -13.0, 14.8, -156.0] indexed_vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml index a1a59f77aece7..d0e3d4dcdf0c5 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml @@ -16,21 +16,21 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: my_dense_vector: [230.0, 300.33, -34.8988, 15.555, -200.0] - do: index: index: test-index - id: 2 + id: "2" body: my_dense_vector: [-0.5, 100.0, -13, 14.8, -156.0] - do: index: index: test-index - id: 3 + id: "3" body: my_dense_vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml index eb7b9850f4399..64ec4bf3639d2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml @@ -31,7 +31,7 @@ setup: catch: bad_request index: index: test-index - id: 1 + id: "1" body: vector: [10, 2] - match: { error.type: "mapper_parsing_exception" } @@ -40,7 +40,7 @@ setup: catch: bad_request index: index: test-index - id: 1 + id: "1" body: indexed_vector: [10, 2] - match: { error.type: "mapper_parsing_exception" } @@ -50,14 +50,14 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [10, 10, 10] - do: index: index: test-index - id: 2 + id: "2" body: vector: [10.5, 10.9, 10.4] @@ -110,7 +110,7 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [1, 2, 3] @@ -152,14 +152,14 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [10, 10, 10] - do: index: index: test-index - id: 2 + id: "2" body: some_other_field: "random_value" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml index d0711bf8a6514..18aaf2ab8264e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml @@ -25,7 +25,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: name: moose.jpg vector: [-0.5, 100.0, -13, 14.8, -156.0] @@ -33,7 +33,7 @@ setup: - do: index: index: test - id: 3 + id: "3" body: name: rabbit.jpg vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml index 004c815556fe1..610c381eba2cf 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml @@ -30,7 +30,7 @@ setup: - do: index: index: futest - id: 2 + id: "2" body: name: moose.jpg vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] @@ -38,7 +38,7 @@ setup: - do: index: index: futest - id: 3 + id: "3" body: name: rabbit.jpg vector: [ 0.5, 111.3, -13.0, 14.8, -156.0 ] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml new file mode 100644 index 0000000000000..b583a25738215 --- /dev/null +++ 
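# The expected scores in the dense-vector Fields API tests of the new file
# below follow directly from the indexed vectors [1,1,1], [1,1,2], [1,1,3]
# and the query vector [4,5,6]; worked examples:
#
#   dotProduct([1,1,3], [4,5,6])              = 4 + 5 + 18          = 27
#   l1Norm([1,1,1], [4,5,6])                  = 3 + 4 + 5           = 12
#   (int) l2Norm([1,1,1], [4,5,6])            = (int) sqrt(9+16+25) = 7
#   (int) (100 * cosineSimilarity([1,1,1], [4,5,6]))
#       = (int) (100 * 15 / (sqrt(3) * sqrt(77)))                   = 98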
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml @@ -0,0 +1,848 @@ +--- +"size and isEmpty code works for any vector, including empty": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + +--- +"null can be used for default value": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 
} + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + +--- +"empty dense vector throws for vector accesses": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: bdv + + - match: { hits.hits.0._id: "missing_vector" } + - match: { hits.hits.0._score: 6543210 } + + - do: + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.cosineSimilarity(q) } catch (IllegalArgumentException e) { score += 200000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: knn + + - match: { hits.hits.0._id: "missing_vector" 
} + - match: { hits.hits.0._score: 6743210 } + +--- +"dot product works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + +--- +"iterator over dense vector values": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { 
error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead"} + +--- +"l1Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + +--- +"l2Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { 
hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + +--- +"cosineSimilarity works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) (field(params.field).get().cosineSimilarity(query) * 100.0f) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + +--- +"query vector of wrong 
type errors": + - skip: + version: " - 8.0.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml index 2e9bcc5dd6b29..67444b09a8d32 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml @@ -20,20 +20,20 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: my_wildcard: hello world null_wildcard: null - do: index: index: test-index - id: 2 + id: "2" body: my_wildcard: goodbye world - do: index: index: test-index - id: 3 + id: "3" body: my_wildcard: cAsE iNsEnSiTiVe World null_wildcard: HAS_VALUE diff --git a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml index 234e4e0b9e5f7..26cf471ea9db7 100644 --- a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml +++ b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml @@ -9,7 +9,7 @@ setup: - do: index: index: some-test-index-1 - id: 1 + id: "1" body: { foo: bar } --- diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml index cafa1fc384d06..4ded3005fb4ef 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml @@ -137,7 +137,7 @@ teardown: transform_id: "simple-remote-transform" body: > { - "source": { "index": ["my_remote_cluster:remote_test_index", "my_remote_cluster:remote_test_index_2"] } + "source": { "index": ["my_remote_cluster:remote_test_index*"] 
} } - do: headers: { Authorization: "Basic am9lOnRyYW5zZm9ybS1wYXNzd29yZA==" } @@ -146,7 +146,8 @@ teardown: - match: { count: 1 } - match: { transforms.0.id: "simple-remote-transform" } - match: { transforms.0.state: "stopped" } - # we added test_index_2, which has 2 more docs: + # the source now includes test_index_2, which has 2 more docs + # note that test_index_3 matches the wildcard pattern but is not authorized; this test must not report a count of 4, as that would mean broken access control - match: { transforms.0.checkpointing.operations_behind: 2 } - do: @@ -155,7 +156,7 @@ teardown: transform_id: "simple-remote-transform" - do: - catch: /Cannot preview transform \[simple-remote-transform\] because user bob lacks all the required permissions for indices. \[my_remote_cluster:remote_test_index, my_remote_cluster:remote_test_index_2, simple-remote-transform\]/ + catch: /Cannot preview transform \[simple-remote-transform\] because user bob lacks all the required permissions for indices. \[my_remote_cluster:remote_test_index\*, simple-remote-transform\]/ headers: { Authorization: "Basic Ym9iOnRyYW5zZm9ybS1wYXNzd29yZA==" } # This is bob transform.preview_transform: transform_id: "simple-remote-transform"
diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml index c550f148f956b..83f94a213303a 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml @@ -22,7 +22,7 @@ setup: "cluster": [], "indices": [ { - "names": ["remote_test_index*"], + "names": ["remote_test_index", "remote_test_index_2"], "privileges": ["read", "view_index_metadata"] } ] @@ -139,3 +139,60 @@ teardown: - length: { aggregations.user.buckets: 2 } - match: { aggregations.user.buckets.0.key: "d" } - match: { aggregations.user.buckets.0.doc_count: 1 } + + # create a third index, for which joe has no privileges + - do: + indices.create: + index: remote_test_index_3 + body: + settings: + index: + number_of_shards: 3 + number_of_replicas: 0 + aliases: + test_alias: {} + mappings: + properties: + time: + type: date + user: + type: keyword + stars: + type: integer + coolness: + type: integer + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "remote_test_index_3"}}' + - '{"user": "z", "stars": 2, "date" : "2018-11-29T12:12:12.123456789Z"}' + - '{"index": {"_index": "remote_test_index_3"}}' + - '{"user": "x", "stars": 1, "date" : "2018-11-29T12:14:12.123456789Z"}' + - do: + search: + rest_total_hits_as_int: true + index: remote_test_index_3 + body: + aggs: + user: + terms: + field: user + + - match: { _shards.total: 3 } + - match: { hits.total: 2 } + - length: { aggregations.user.buckets: 2 } + + # search should fail for joe + - do: + catch: /action \[indices:data/read/search\] is unauthorized for user \[joe\] .*/ + headers: { Authorization: "Basic am9lOnRyYW5zZm9ybS1wYXNzd29yZA==" } + search: + rest_total_hits_as_int: true + index: remote_test_index_3 + body: + aggs: + user: + terms: + field: user
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java
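// The two Java test changes below replace high-level REST client (HLRC) calls
// with plain low-level Request objects, presumably as part of phasing the
// HLRC out of these QA tests. A minimal sketch of the equivalent low-level
// feature-reset call:
//
//     Request reset = new Request(HttpPost.METHOD_NAME, "/_features/_reset");
//     client().performRequest(reset);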
b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java index 3a5ea944761d2..31dc881dfd1f9 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.transform.integration; +import org.apache.http.client.methods.HttpPost; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.feature.ResetFeaturesRequest; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.client.transform.transforms.pivot.SingleGroupSource; @@ -86,8 +86,7 @@ public void testTransformFeatureReset() throws Exception { assertTrue(putTransform(config, RequestOptions.DEFAULT).isAcknowledged()); assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged()); - TestRestHighLevelClient highLevelClient = new TestRestHighLevelClient(); - highLevelClient.features().resetFeatures(new ResetFeaturesRequest(), RequestOptions.DEFAULT); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_features/_reset")); Response response = adminClient().performRequest(new Request("GET", "/_cluster/state?metric=metadata")); Map<String, Object> metadata = (Map<String, Object>) ESRestTestCase.entityAsMap(response).get("metadata");
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java index 4b62cc6e80376..5a6551774e02e 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java @@ -7,15 +7,16 @@ package org.elasticsearch.xpack.transform.integration; +import org.apache.http.client.methods.HttpGet; import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.AcknowledgedResponse; @@ -412,13 +413,18 @@ protected Map<String, Object> toLazy(ToXContent parsedObject) throws Exception { } private void waitForPendingTasks() { - ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.setWaitForCompletion(true); - listTasksRequest.setDetailed(true); - listTasksRequest.setTimeout(TimeValue.timeValueSeconds(10)); - try (RestHighLevelClient restClient = new TestRestHighLevelClient()) { - -
restClient.tasks().list(listTasksRequest, RequestOptions.DEFAULT); + Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); + Map<String, String> parameters = Map.of( + "wait_for_completion", + Boolean.TRUE.toString(), + "detailed", + Boolean.TRUE.toString(), + "timeout", + TimeValue.timeValueSeconds(10).getStringRep() + ); + request.addParameters(parameters); + try { + client().performRequest(request); } catch (Exception e) { throw new AssertionError("Failed to wait for pending tasks to complete", e); }
diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java index 7c4746989c888..1e3d89b565e0a 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java @@ -44,6 +44,8 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; @@ -64,11 +66,9 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -85,22 +85,24 @@ public class TransformCheckpointServiceNodeTests extends TransformSingleNodeTest private class MockClientForCheckpointing extends NoOpClient { - private volatile ShardStats[] shardStats; + private final boolean supportTransformCheckpointApi; + private volatile Map<String, long[]> checkpoints; private volatile String[] indices; - MockClientForCheckpointing(String testName) { + /** + * Mock client for checkpointing + * + * @param testName name of the test, used for naming the threadpool + * @param supportTransformCheckpointApi whether to mock the checkpoint API; if false, the client throws action-not-found + */ + MockClientForCheckpointing(String testName, boolean supportTransformCheckpointApi) { super(testName); + this.supportTransformCheckpointApi = supportTransformCheckpointApi; } - void setShardStats(ShardStats[] shardStats) { - this.shardStats = shardStats; - - Set<String> indicesSet = new HashSet<>(); - for (ShardStats s : shardStats) { - indicesSet.add(s.getShardRouting().getIndexName()); - } - - this.indices = indicesSet.toArray(new String[0]); + void setCheckpoints(Map<String, long[]> checkpoints) { + this.checkpoints = checkpoints; + this.indices = checkpoints.keySet().toArray(new String[0]); } @SuppressWarnings("unchecked") @@ -111,6 +113,18 @@ protected void ActionListener<Response> listener ) { + if (request instanceof GetCheckpointAction.Request) { + // throw action-not-found if the checkpoint API is not supported; the transform should fall back to legacy checkpointing + if (supportTransformCheckpointApi == false) { + listener.onFailure(new
ActionNotFoundTransportException(GetCheckpointAction.NAME)); + return; + } + + final GetCheckpointAction.Response getCheckpointResponse = new GetCheckpointAction.Response(checkpoints); + listener.onResponse((Response) getCheckpointResponse); + return; + } + if (request instanceof GetIndexRequest) { // for this test we only need the indices assert (indices != null); @@ -118,11 +132,13 @@ protected void listener.onResponse((Response) indexResponse); return; - } else if (request instanceof IndicesStatsRequest) { + } + + if (request instanceof IndicesStatsRequest) { // IndicesStatsResponse is package private, therefore using a mock final IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class); - when(indicesStatsResponse.getShards()).thenReturn(shardStats); + when(indicesStatsResponse.getShards()).thenReturn(createShardStats(checkpoints)); when(indicesStatsResponse.getFailedShards()).thenReturn(0); listener.onResponse((Response) indicesStatsResponse); @@ -137,7 +153,7 @@ protected void public void createComponents() { // it's not possible to run it as @BeforeClass as clients aren't initialized if (mockClientForCheckpointing == null) { - mockClientForCheckpointing = new MockClientForCheckpointing("TransformCheckpointServiceNodeTests"); + mockClientForCheckpointing = new MockClientForCheckpointing("TransformCheckpointServiceNodeTests", randomBoolean()); } ClusterService clusterService = mock(ClusterService.class); transformsConfigManager = new IndexBasedTransformConfigManager( @@ -270,7 +286,7 @@ public void testGetCheckpointStats() throws InterruptedException { assertAsync(listener -> transformsConfigManager.putTransformCheckpoint(checkpoint2, listener), true, null, null); - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 20, 20, 20))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 20, 20, 20)); TransformCheckpointingInfo checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), @@ -286,7 +302,7 @@ public void testGetCheckpointStats() throws InterruptedException { null ); - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 50, 33))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 10, 50, 33)); checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), @@ -302,7 +318,7 @@ public void testGetCheckpointStats() throws InterruptedException { ); // same as current - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 10, 10))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 10, 10, 10)); checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java new file mode 100644 index 0000000000000..2130a15760acb --- /dev/null +++ 
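// Checkpoint arithmetic relied on by the integration test below: each shard
// receives docsToCreatePerShard documents with sequence numbers
// 0..docsToCreatePerShard-1, so a shard's global checkpoint is
// docsToCreatePerShard - 1 and the expected sum over all primaries is
// (docsToCreatePerShard - 1) * shards * indices. An empty shard reports a
// global checkpoint of -1, which the same formula covers when
// docsToCreatePerShard == 0.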
b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.checkpoint; + +import org.apache.commons.lang3.ArrayUtils; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.stream.Collectors; + +/** + * Test suite for checkpointing using transform getcheckpoint API + */ +public class TransformGetCheckpointIT extends TransformSingleNodeTestCase { + + public void testGetCheckpoint() throws Exception { + final String indexNamePrefix = "test_index-"; + final int shards = randomIntBetween(1, 5); + final int indices = randomIntBetween(1, 5); + + for (int i = 0; i < indices; ++i) { + client().admin() + .indices() + .prepareCreate(indexNamePrefix + i) + .setSettings(Settings.builder().put("index.number_of_shards", shards).put("index.number_of_replicas", 1)) + .get(); + } + + final GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { indexNamePrefix + "*" }, + IndicesOptions.LENIENT_EXPAND_OPEN + ); + + final GetCheckpointAction.Response response = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertEquals(indices, response.getCheckpoints().size()); + + // empty indices should report -1 as sequence id + assertFalse( + response.getCheckpoints().entrySet().stream().anyMatch(entry -> Arrays.stream(entry.getValue()).anyMatch(l -> l != -1L)) + ); + + final int docsToCreatePerShard = randomIntBetween(0, 10); + for (int d = 0; d < docsToCreatePerShard; ++d) { + for (int i = 0; i < indices; ++i) { + for (int j = 0; j < shards; ++j) { + client().prepareIndex(indexNamePrefix + i).setSource("{" + "\"field\":" + j + "}", XContentType.JSON).get(); + } + } + } + + client().admin().indices().refresh(new RefreshRequest(indexNamePrefix + "*")); + + final GetCheckpointAction.Response response2 = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertEquals(indices, response2.getCheckpoints().size()); + + // check the sum, counting starts with 0, so we have to take docsToCreatePerShard - 1 + long checkpointSum = response2.getCheckpoints().values().stream().map(l -> Arrays.stream(l).sum()).mapToLong(Long::valueOf).sum(); + assertEquals( + "Expected " + + (docsToCreatePerShard - 1) * shards * indices + + " as sum of " + + response2.getCheckpoints() + .entrySet() + .stream() + .map(e -> e.getKey() + ": {" + Strings.arrayToCommaDelimitedString(ArrayUtils.toObject(e.getValue())) + "}") + .collect(Collectors.joining(",")), + (docsToCreatePerShard - 1) * shards * indices, + checkpointSum + ); + + final IndicesStatsResponse statsResponse = client().admin().indices().prepareStats(indexNamePrefix + "*").get(); + + assertEquals( + "Checkpoint API and indices stats don't 
match", + Arrays.stream(statsResponse.getShards()) + .filter(i -> i.getShardRouting().primary()) + .sorted(Comparator.comparingInt(value -> value.getShardRouting().id())) + .mapToLong(s -> s.getSeqNoStats().getGlobalCheckpoint()) + .sum(), + checkpointSum + ); + } + +} diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java new file mode 100644 index 0000000000000..2b3d403e74c18 --- /dev/null +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java @@ -0,0 +1,291 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.checkpoint; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.EmptySystemIndices; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.transport.MockTransport; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import 
java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static java.util.Collections.emptySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransformGetCheckpointTests extends ESSingleNodeTestCase { + + private TransportService transportService; + private ClusterService clusterService; + private IndicesService indicesService; + private ThreadPool threadPool; + private IndexNameExpressionResolver indexNameExpressionResolver; + private MockTransport mockTransport; + private Task transformTask; + private final String indexNamePattern = "test_index-"; + private String[] testIndices; + private int numberOfNodes; + private int numberOfIndices; + private int numberOfShards; + + private TestTransportGetCheckpointAction getCheckpointAction; + private TestTransportGetCheckpointNodeAction getCheckpointNodeAction; + private ClusterState clusterStateWithIndex; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + numberOfNodes = randomIntBetween(1, 10); + numberOfIndices = randomIntBetween(1, 10); + // create at least as many shards as nodes, so every node has at least 1 shard + numberOfShards = randomIntBetween(numberOfNodes, numberOfNodes * 3); + threadPool = new TestThreadPool("GetCheckpointActionTests"); + indexNameExpressionResolver = new MockResolver(); + clusterService = getInstanceFromNode(ClusterService.class); + indicesService = getInstanceFromNode(IndicesService.class); + mockTransport = new MockTransport() { + @Override + protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) { + if (action.equals(GetCheckpointNodeAction.NAME)) { + getCheckpointNodeAction.execute( + null, + (GetCheckpointNodeAction.Request) request, + ActionListener.wrap(r -> { this.handleResponse(requestId, r); }, e -> { + this.handleError(requestId, new TransportException(e.getMessage(), e)); + + }) + ); + } + } + }; + + transportService = mockTransport.createTransportService( + clusterService.getSettings(), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> clusterService.localNode(), + null, + emptySet() + ); + transportService.start(); + transportService.acceptIncomingRequests(); + + List testIndicesList = new ArrayList<>(); + for (int i = 0; i < numberOfIndices; ++i) { + testIndicesList.add(indexNamePattern + i); + } + testIndices = testIndicesList.toArray(new String[0]); + clusterStateWithIndex = ClusterStateCreationUtils.state(numberOfNodes, testIndices, numberOfShards); + + transformTask = new Task( + 1L, + "persistent", + "action", + TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + "the_id", + TaskId.EMPTY_TASK_ID, + Collections.emptyMap() + ); + getCheckpointAction = new TestTransportGetCheckpointAction(); + getCheckpointNodeAction = new TestTransportGetCheckpointNodeAction(); + } + + @Override + @After + public void tearDown() throws Exception { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + super.tearDown(); + } + + public void testEmptyCheckpoint() throws InterruptedException { + GetCheckpointAction.Request request = new GetCheckpointAction.Request(Strings.EMPTY_ARRAY, IndicesOptions.LENIENT_EXPAND_OPEN); + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertTrue(checkpoints.isEmpty()); + + }); + } + + public void testSingleIndexRequest() throws InterruptedException { + 
GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { indexNamePattern + "0" }, + IndicesOptions.LENIENT_EXPAND_OPEN + ); + + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertEquals(1, checkpoints.size()); + assertTrue(checkpoints.containsKey(indexNamePattern + "0")); + for (int i = 0; i < numberOfShards; ++i) { + assertEquals(42 + i, checkpoints.get(indexNamePattern + "0")[i]); + } + assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); + + }); + } + + public void testMultiIndexRequest() throws InterruptedException { + GetCheckpointAction.Request request = new GetCheckpointAction.Request(testIndices, IndicesOptions.LENIENT_EXPAND_OPEN); + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertEquals(testIndices.length, checkpoints.size()); + for (int i = 0; i < this.numberOfIndices; ++i) { + assertTrue(checkpoints.containsKey(indexNamePattern + i)); + for (int j = 0; j < numberOfShards; ++j) { + assertEquals(42 + i + j, checkpoints.get(indexNamePattern + i)[j]); + } + } + assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); + }); + } + + class TestTransportGetCheckpointAction extends TransportGetCheckpointAction { + + TestTransportGetCheckpointAction() { + super(transportService, new ActionFilters(emptySet()), indicesService, clusterService, indexNameExpressionResolver); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + resolveIndicesAndGetCheckpoint(task, request, listener, clusterStateWithIndex); + } + + } + + class TestTransportGetCheckpointNodeAction extends TransportGetCheckpointNodeAction { + + private final IndicesService mockIndicesService; + private int calls; + + TestTransportGetCheckpointNodeAction() { + super(transportService, new ActionFilters(emptySet()), indicesService); + calls = 0; + mockIndicesService = mock(IndicesService.class); + for (int i = 0; i < numberOfIndices; ++i) { + IndexService mockIndexService = mock(IndexService.class); + IndexMetadata indexMeta = clusterStateWithIndex.metadata().index(indexNamePattern + i); + + IndexSettings mockIndexSettings = new IndexSettings(indexMeta, clusterService.getSettings()); + when(mockIndexService.getIndexSettings()).thenReturn(mockIndexSettings); + for (int j = 0; j < numberOfShards; ++j) { + IndexShard mockIndexShard = mock(IndexShard.class); + when(mockIndexService.getShard(j)).thenReturn(mockIndexShard); + SeqNoStats seqNoStats = new SeqNoStats(42 + i + j, 42 + i + j, 42 + i + j); + when(mockIndexShard.seqNoStats()).thenReturn(seqNoStats); + } + + when(mockIndicesService.indexServiceSafe(indexMeta.getIndex())).thenReturn(mockIndexService); + } + } + + @Override + protected void doExecute( + Task task, + GetCheckpointNodeAction.Request request, + ActionListener listener + ) { + ++calls; + getGlobalCheckpoints(mockIndicesService, request.getShards(), listener); + } + + public int getCalls() { + return calls; + } + } + + static class MockResolver extends IndexNameExpressionResolver { + MockResolver() { + super(new ThreadContext(Settings.EMPTY), EmptySystemIndices.INSTANCE); + } + + @Override + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { + return request.indices(); + } + + @Override + public String[] concreteIndexNames( + ClusterState state, + IndicesOptions options, + boolean includeDataStreams, + 
String... indexExpressions + ) { + return indexExpressions; + } + + @Override + public Index[] concreteIndices(ClusterState state, IndicesRequest request) { + Index[] out = new Index[request.indices().length]; + for (int x = 0; x < out.length; x++) { + out[x] = new Index(request.indices()[x], "_na_"); + } + return out; + } + } + + private void assertCheckpointAction(GetCheckpointAction.Request request, Consumer furtherTests) + throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean listenerCalled = new AtomicBoolean(false); + + LatchedActionListener listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + assertTrue("listener called more than once", listenerCalled.compareAndSet(false, true)); + furtherTests.accept(r); + }, e -> { fail("got unexpected exception: " + e); }), latch); + + ActionTestUtils.execute(getCheckpointAction, transformTask, request, listener); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 1cde900c3b54a..3c9b3f3596026 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -60,6 +60,8 @@ import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; @@ -72,6 +74,8 @@ import org.elasticsearch.xpack.core.transform.action.UpgradeTransformsAction; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.transform.action.TransportDeleteTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; @@ -188,10 +192,14 @@ public List getRestHandlers( new ActionHandler<>(PreviewTransformAction.INSTANCE, TransportPreviewTransformAction.class), new ActionHandler<>(UpdateTransformAction.INSTANCE, TransportUpdateTransformAction.class), new ActionHandler<>(SetResetModeAction.INSTANCE, TransportSetTransformResetModeAction.class), - new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), new ActionHandler<>(UpgradeTransformsAction.INSTANCE, TransportUpgradeTransformsAction.class), new ActionHandler<>(ResetTransformAction.INSTANCE, TransportResetTransformAction.class), + // internal, no rest endpoint + new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), + new ActionHandler<>(GetCheckpointAction.INSTANCE, TransportGetCheckpointAction.class), + new 
ActionHandler<>(GetCheckpointNodeAction.INSTANCE, TransportGetCheckpointNodeAction.class), + // usage and info new ActionHandler<>(XPackUsageFeatureAction.TRANSFORM, TransformUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.TRANSFORM, TransformInfoTransportAction.class) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java new file mode 100644 index 0000000000000..0397c38ec90e5 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.transform.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.UnavailableShardsException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; + +public class TransportGetCheckpointAction extends HandledTransportAction { + + private static final Logger logger = LogManager.getLogger(TransportGetCheckpointAction.class); + private final ClusterService clusterService; + private final IndicesService indicesService; + private final TransportService transportService; + private final IndexNameExpressionResolver indexNameExpressionResolver; + + @Inject + public TransportGetCheckpointAction( + final TransportService 
transportService, + final ActionFilters actionFilters, + final IndicesService indicesService, + final ClusterService clusterService, + final IndexNameExpressionResolver indexNameExpressionResolver + ) { + super(GetCheckpointAction.NAME, transportService, actionFilters, Request::new); + this.transportService = transportService; + this.indicesService = indicesService; + this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<Response> listener) { + final ClusterState state = clusterService.state(); + resolveIndicesAndGetCheckpoint(task, request, listener, state); + } + + protected void resolveIndicesAndGetCheckpoint(Task task, Request request, ActionListener<Response> listener, final ClusterState state) { + // note: when security is turned on, the indices are already resolved + // TODO: do a quick check and only resolve if necessary?? + String[] concreteIndices = this.indexNameExpressionResolver.concreteIndexNames(state, request); + + Map<String, Set<ShardId>> nodesAndShards = resolveIndicesToPrimaryShards(state, concreteIndices); + + if (nodesAndShards.size() == 0) { + listener.onResponse(new Response(Collections.emptyMap())); + return; + } + + new AsyncGetCheckpointsFromNodesAction(state, task, nodesAndShards, new OriginalIndices(request), listener).start(); + } + + private Map<String, Set<ShardId>> resolveIndicesToPrimaryShards(ClusterState state, String[] concreteIndices) { + if (concreteIndices.length == 0) { + return Collections.emptyMap(); + } + + final DiscoveryNodes nodes = state.nodes(); + Map<String, Set<ShardId>> nodesAndShards = new HashMap<>(); + + ShardsIterator shardsIt = state.routingTable().allShards(concreteIndices); + for (ShardRouting shard : shardsIt) { + // only take primary shards, which should be exactly 1, this isn't strictly necessary + // and we should consider taking any shard copy, but then we need another way to de-dup + if (shard.primary() == false) { + continue; + } + if (shard.assignedToNode() && nodes.get(shard.currentNodeId()) != null) { + // special case: a node that holds the shard is on an old version + if (nodes.get(shard.currentNodeId()).getVersion().before(Version.V_8_2_0)) { + throw new ActionNotFoundTransportException(GetCheckpointNodeAction.NAME); + } + + String nodeId = shard.currentNodeId(); + nodesAndShards.computeIfAbsent(nodeId, k -> new HashSet<>()).add(shard.shardId()); + } else { + throw new NoShardAvailableActionException(shard.shardId(), " no primary shards available for shard [" + shard + "]"); + } + } + return nodesAndShards; + } + + protected class AsyncGetCheckpointsFromNodesAction { + private final Task task; + private final ActionListener<Response> listener; + private final Map<String, Set<ShardId>> nodesAndShards; + private final OriginalIndices originalIndices; + private final DiscoveryNodes nodes; + private final String localNodeId; + + protected AsyncGetCheckpointsFromNodesAction( + ClusterState clusterState, + Task task, + Map<String, Set<ShardId>> nodesAndShards, + OriginalIndices originalIndices, + ActionListener<Response> listener + ) { + this.task = task; + this.listener = listener; + this.nodesAndShards = nodesAndShards; + this.originalIndices = originalIndices; + this.nodes = clusterState.nodes(); + this.localNodeId = clusterService.localNode().getId(); + } + + public void start() { + GroupedActionListener<GetCheckpointNodeAction.Response> groupedListener = new GroupedActionListener<>( + ActionListener.wrap(responses -> { + // the final list should be ordered by key + Map<String, long[]> checkpointsByIndexReduced = new TreeMap<>(); + + // merge the node responses + for (GetCheckpointNodeAction.Response response : responses) { + response.getCheckpoints().forEach((index, checkpoint) -> { + if (checkpointsByIndexReduced.containsKey(index)) { + long[] shardCheckpoints = checkpointsByIndexReduced.get(index); + for (int i = 0; i < checkpoint.length; ++i) { + shardCheckpoints[i] = Math.max(shardCheckpoints[i], checkpoint[i]); + } + } else { + checkpointsByIndexReduced.put(index, checkpoint); + } + }); + } + + listener.onResponse(new Response(checkpointsByIndexReduced)); + }, listener::onFailure), + nodesAndShards.size() + ); + + for (Entry<String, Set<ShardId>> oneNodeAndItsShards : nodesAndShards.entrySet()) { + if (localNodeId.equals(oneNodeAndItsShards.getKey())) { + TransportGetCheckpointNodeAction.getGlobalCheckpoints(indicesService, oneNodeAndItsShards.getValue(), groupedListener); + continue; + } + + GetCheckpointNodeAction.Request nodeCheckpointsRequest = new GetCheckpointNodeAction.Request( + oneNodeAndItsShards.getValue(), + originalIndices + ); + DiscoveryNode node = nodes.get(oneNodeAndItsShards.getKey()); + + // paranoia: this should not be possible using the same cluster state + if (node == null) { + listener.onFailure( + new UnavailableShardsException( + oneNodeAndItsShards.getValue().iterator().next(), + "Node not found for [{}] shards", + oneNodeAndItsShards.getValue().size() + ) + ); + return; + } + + logger.trace("get checkpoints from node {}", node); + transportService.sendChildRequest( + node, + GetCheckpointNodeAction.NAME, + nodeCheckpointsRequest, + task, + TransportRequestOptions.EMPTY, + new TransportResponseHandler<GetCheckpointNodeAction.Response>() { + + @Override + public GetCheckpointNodeAction.Response read(StreamInput in) throws IOException { + return new GetCheckpointNodeAction.Response(in); + } + + @Override + public void handleResponse(GetCheckpointNodeAction.Response response) { + groupedListener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + groupedListener.onFailure(exp); + } + + } + ); + } + } + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java new file mode 100644 index 0000000000000..b257ed80acf14 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.transform.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Response; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +public class TransportGetCheckpointNodeAction extends HandledTransportAction<Request, Response> { + + private final IndicesService indicesService; + + @Inject + public TransportGetCheckpointNodeAction( + final TransportService transportService, + final ActionFilters actionFilters, + final IndicesService indicesService + ) { + super(GetCheckpointNodeAction.NAME, transportService, actionFilters, Request::new); + this.indicesService = indicesService; + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<Response> listener) { + getGlobalCheckpoints(indicesService, request.getShards(), listener); + } + + protected static void getGlobalCheckpoints(IndicesService indicesService, Set<ShardId> shards, ActionListener<Response> listener) { + Map<String, long[]> checkpointsByIndexOfThisNode = new HashMap<>(); + for (ShardId shardId : shards) { + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.id()); + + checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> { + long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()]; + Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO); + return seqNumbers; + }); + checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint(); + } + listener.onResponse(new Response(checkpointsByIndexOfThisNode)); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 5d460e1c6f8b1..ea294a6cd65ca 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -267,7 +267,7 @@ private void getPreview( mappings.set(deducedMappings); function.preview( client, - ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()), + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()), source, deducedMappings, NUMBER_OF_PREVIEW_BUCKETS, diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 6b400c718c6b3..2f79d118a48a5
100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -94,7 +94,10 @@ protected void masterOperation(Task task, Request request, ClusterState clusterS XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); // set headers to run transform as calling user - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); TransformConfig config = request.getConfig().setHeaders(filteredHeaders).setCreateTime(Instant.now()).setVersion(Version.CURRENT); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index c5d4bd518538a..a18ede68d06a3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -119,7 +119,10 @@ protected void doExecute(Task task, Request request, ActionListener li } // set headers to run transform as calling user - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); TransformConfigUpdate update = request.getUpdate(); update.setHeaders(filteredHeaders); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java index 801fb1f7064de..0ed005f4f92ed 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -21,8 +22,10 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder; @@ -59,6 +62,9 @@ class DefaultCheckpointProvider implements CheckpointProvider { protected final 
TransformAuditor transformAuditor; protected final TransformConfig transformConfig; + // set of clusters that do not support 8.2+ checkpoint actions + private final Set<String> fallbackToBWC = new HashSet<>(); + DefaultCheckpointProvider( final Clock clock, final Client client, @@ -130,7 +136,7 @@ protected void getIndexCheckpoints(ActionListener<Map<String, long[]>> listener) remoteClient, transformConfig.getHeaders(), remoteIndex.getValue().toArray(new String[0]), - remoteIndex.getKey() + RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR, + remoteIndex.getKey(), groupedListener ); } @@ -139,11 +145,69 @@ protected void getIndexCheckpoints(ActionListener<Map<String, long[]>> listener) } } - private static void getCheckpointsFromOneCluster( + private void getCheckpointsFromOneCluster( + Client client, + Map<String, String> headers, + String[] indices, + String cluster, + ActionListener<Map<String, long[]>> listener + ) { + if (fallbackToBWC.contains(cluster)) { + getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener); + } else { + getCheckpointsFromOneClusterV2(client, headers, indices, cluster, ActionListener.wrap(response -> { + logger.debug( + "[{}] Successfully retrieved checkpoints from cluster [{}] using transform checkpoint API", + transformConfig.getId(), + cluster + ); + listener.onResponse(response); + }, e -> { + Throwable unwrappedException = ExceptionsHelper.unwrapCause(e); + if (unwrappedException instanceof ActionNotFoundTransportException) { + // this is an implementation detail, so not necessary to audit or warn, but only report as debug + logger.debug( + "[{}] Cluster [{}] does not support transform checkpoint API, falling back to legacy checkpointing", + transformConfig.getId(), + cluster + ); + + fallbackToBWC.add(cluster); + getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener); + } else { + listener.onFailure(e); + } + })); + } + } + + private static void getCheckpointsFromOneClusterV2( + Client client, + Map<String, String> headers, + String[] indices, + String cluster, + ActionListener<Map<String, long[]>> listener + ) { + GetCheckpointAction.Request getCheckpointRequest = new GetCheckpointAction.Request(indices, IndicesOptions.LENIENT_EXPAND_OPEN); + + ClientHelper.executeWithHeadersAsync( + headers, + ClientHelper.TRANSFORM_ORIGIN, + client, + GetCheckpointAction.INSTANCE, + getCheckpointRequest, + ActionListener.wrap(checkpointResponse -> listener.onResponse(checkpointResponse.getCheckpoints()), listener::onFailure) + ); + } + + /** + * BWC fallback for nodes/clusters older than 8.2 + */ + private static void getCheckpointsFromOneClusterBWC( Client client, Map<String, String> headers, String[] indices, - String prefix, + String cluster, ActionListener<Map<String, long[]>> listener ) { // 1st get index to see the indexes the user has access to @@ -189,14 +253,14 @@ private static void getCheckpointsFromOneCluster( ); return; } - listener.onResponse(extractIndexCheckPoints(response.getShards(), userIndices, prefix)); + listener.onResponse(extractIndexCheckPoints(response.getShards(), userIndices, cluster)); }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) ); }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) ); } - static Map<String, long[]> extractIndexCheckPoints(ShardStats[] shards, Set<String> userIndices, String prefix) { + static Map<String, long[]> extractIndexCheckPoints(ShardStats[] shards, Set<String> userIndices, String cluster) { Map<String, TreeMap<Integer, Long>> checkpointsByIndex = new TreeMap<>(); for (ShardStats shard : shards) { @@ -205,7 +269,9 @@ static Map<String, long[]> extractIndexCheckPoints(ShardStats[] shards, Set<St TreeMap<Integer, Long> checkpoints = checkpointsByIndex.get(fullIndexName);
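For reference, the internal GetCheckpoint action introduced above can also be driven directly. The following is a minimal usage sketch, not part of the change set: it assumes a `Client` instance named `client` and a log4j `Logger` named `logger`, and the index pattern is only an example. The request and response types and the `IndicesOptions` constant are the ones added in this change.

import java.util.Arrays;
import java.util.Map;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction;

// resolve all matching indices and fetch their global checkpoints
GetCheckpointAction.Request request = new GetCheckpointAction.Request(
    new String[] { "my-index-*" },        // example index pattern, resolved server side
    IndicesOptions.LENIENT_EXPAND_OPEN    // same options DefaultCheckpointProvider uses
);
client.execute(GetCheckpointAction.INSTANCE, request, ActionListener.wrap(response -> {
    // the response maps each resolved index to one global checkpoint per shard, ordered by shard id
    Map<String, long[]> checkpoints = response.getCheckpoints();
    checkpoints.forEach((index, perShard) -> logger.info("[{}] checkpoints: {}", index, Arrays.toString(perShard)));
}, e -> logger.error("failed to get checkpoints", e)));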
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java index 4e30dd38141fb..902a69b885079 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java @@ -12,18 +12,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.indices.get.GetIndexAction; -import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -36,6 +30,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -53,6 +48,7 @@ import org.mockito.stubbing.Answer; import java.time.Clock; +import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; @@ -62,8 +58,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -255,19 +251,8 @@ private void testCreateNextCheckpoint( TransformCheckpoint lastCheckpoint, TransformCheckpoint expectedNextCheckpoint ) throws InterruptedException { - GetIndexResponse getIndexResponse = new GetIndexResponse( - new String[] { "some-index" }, - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of() - ); - doAnswer(withResponse(getIndexResponse)).when(client).execute(eq(GetIndexAction.INSTANCE), any(), any()); - IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class); - when(indicesStatsResponse.getShards()).thenReturn(new ShardStats[0]); - when(indicesStatsResponse.getFailedShards()).thenReturn(0); - doAnswer(withResponse(indicesStatsResponse)).when(client).execute(eq(IndicesStatsAction.INSTANCE), any(), any()); + 
GetCheckpointAction.Response checkpointResponse = new GetCheckpointAction.Response(Collections.emptyMap()); + doAnswer(withResponse(checkpointResponse)).when(client).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); TransformConfig transformConfig = newTransformConfigWithDateHistogram( transformId, diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java new file mode 100644 index 0000000000000..785016bed097a --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; + +import java.nio.ByteBuffer; +import java.util.List; + +public class BinaryDenseVector implements DenseVector { + protected final BytesRef docVector; + protected final int dims; + protected final Version indexVersion; + + protected float[] decodedDocVector; + + public BinaryDenseVector(BytesRef docVector, int dims, Version indexVersion) { + this.docVector = docVector; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public float[] getVector() { + if (decodedDocVector == null) { + decodedDocVector = new float[dims]; + VectorEncoderDecoder.decodeDenseVector(docVector, decodedDocVector); + } + return decodedDocVector; + } + + @Override + public float getMagnitude() { + return VectorEncoderDecoder.getMagnitude(indexVersion, docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (float v : queryVector) { + dotProduct += byteBuffer.getFloat() * v; + } + return dotProduct; + } + + @Override + public double dotProduct(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (int i = 0; i < queryVector.size(); i++) { + dotProduct += byteBuffer.getFloat() * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (float v : queryVector) { + l1norm += Math.abs(v - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l1Norm(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (int i = 0; i < queryVector.size(); i++) { + l1norm += Math.abs(queryVector.get(i).floatValue() - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l2Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (float queryValue : queryVector) { + double diff = byteBuffer.getFloat() - queryValue; + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double l2Norm(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (Number number : queryVector) { + double diff = byteBuffer.getFloat() - number.floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double 
cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public int size() { + return 1; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return dims; + } + + private static ByteBuffer wrap(BytesRef dv) { + return ByteBuffer.wrap(dv.bytes, dv.offset, dv.length); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java new file mode 100644 index 0000000000000..ad1d016132547 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; + +import java.io.IOException; + +public class BinaryDenseVectorDocValuesField extends DenseVectorDocValuesField { + + protected final BinaryDocValues input; + protected final Version indexVersion; + protected final int dims; + protected BytesRef value; + + public BinaryDenseVectorDocValuesField(BinaryDocValues input, String name, int dims, Version indexVersion) { + super(name); + this.input = input; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input.advanceExact(docId)) { + value = input.binaryValue(); + } else { + value = null; + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + @Override + public boolean isEmpty() { + return value == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java deleted file mode 100644 index 852b63500a9bf..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; - -import java.io.IOException; -import java.nio.ByteBuffer; - -public class BinaryDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class BinaryDenseVectorSupplier implements DenseVectorSupplier { - - private final BinaryDocValues in; - private BytesRef value; - - public BinaryDenseVectorSupplier(BinaryDocValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - if (in.advanceExact(docId)) { - value = in.binaryValue(); - } else { - value = null; - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public BytesRef getInternal() { - return value; - } - - @Override - public int size() { - if (value == null) { - return 0; - } else { - return 1; - } - } - } - - private final BinaryDenseVectorSupplier bdvSupplier; - private final Version indexVersion; - private final float[] vector; - - BinaryDenseVectorScriptDocValues(BinaryDenseVectorSupplier supplier, Version indexVersion, int dims) { - super(supplier, dims); - this.bdvSupplier = supplier; - this.indexVersion = indexVersion; - this.vector = new float[dims]; - } - - @Override - public int size() { - return supplier.size(); - } - - @Override - public float[] getVectorValue() { - VectorEncoderDecoder.decodeDenseVector(bdvSupplier.getInternal(), vector); - return vector; - } - - @Override - public float getMagnitude() { - return VectorEncoderDecoder.getMagnitude(indexVersion, bdvSupplier.getInternal()); - } - - @Override - public double dotProduct(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double dotProduct = 0; - for (float queryValue : queryVector) { - dotProduct += queryValue * byteBuffer.getFloat(); - } - return (float) dotProduct; - } - - @Override - public double l1Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double l1norm = 0; - for (float queryValue : queryVector) { - l1norm += Math.abs(queryValue - byteBuffer.getFloat()); - } - return l1norm; - } - - @Override - public double l2Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - double l2norm = 0; - for (float queryValue : queryVector) { - double diff = queryValue - byteBuffer.getFloat(); - l2norm += diff * diff; - } - return Math.sqrt(l2norm); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java new file mode 100644 index 0000000000000..4ffbccbd9e415 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import java.util.List; + +/** + * DenseVector value type for Painless. + */ +/* dotProduct, l1Norm, l2Norm, cosineSimilarity have three flavors depending on the type of the queryVector + * 1) float[], this is for the ScoreScriptUtils class bindings which have converted a List based query vector into an array + * 2) List, a Painless script will typically use Lists since they are easy to pass as params and have an easy + * literal syntax. Working with Lists directly, instead of converting to a float[], trades off runtime operations against + * memory pressure. Dense Vectors may have high dimensionality, up to 2048. Allocating a float[] per doc per script API + * call is prohibitively expensive. + * 3) Object, the whitelisted method for the Painless API. Calls into the float[] or List version based on the + class of the argument and checks dimensionality. + */ +public interface DenseVector { + float[] getVector(); + + float getMagnitude(); + + double dotProduct(float[] queryVector); + + double dotProduct(List queryVector); + + @SuppressWarnings("unchecked") + default double dotProduct(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return dotProduct(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return dotProduct((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l1Norm(float[] queryVector); + + double l1Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l1Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l1Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l1Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l2Norm(float[] queryVector); + + double l2Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l2Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l2Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l2Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + /** + * Get the cosine similarity with the un-normalized query vector + */ + default double cosineSimilarity(float[] queryVector) { + return cosineSimilarity(queryVector, true); + } + + /** + * Get the cosine similarity with the query vector + * @param normalizeQueryVector - normalize the query vector, does not change the contents of passed in query vector + */ + double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector); + + /** + * Get the cosine similarity with the un-normalized query vector + */ + double cosineSimilarity(List queryVector); + + /** + * Get the cosine similarity with the un-normalized query vector. Handles queryVectors of type float[] and List.
+ */ + @SuppressWarnings("unchecked") + default double cosineSimilarity(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return cosineSimilarity(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return cosineSimilarity((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + boolean isEmpty(); + + int getDims(); + + int size(); + + static float getMagnitude(float[] vector) { + double mag = 0.0f; + for (float elem : vector) { + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static float getMagnitude(List vector) { + double mag = 0.0f; + for (Number number : vector) { + float elem = number.floatValue(); + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static void checkDimensions(int dvDims, int qvDims) { + if (dvDims != qvDims) { + throw new IllegalArgumentException( + "The query vector has a different number of dimensions [" + qvDims + "] than the document vectors [" + dvDims + "]." + ); + } + } + + private static String badQueryVectorType(Object queryVector) { + return "Cannot use vector [" + queryVector + "] with class [" + queryVector.getClass().getName() + "] as query vector"; + } + + DenseVector EMPTY = new DenseVector() { + public static final String MISSING_VECTOR_FIELD_MESSAGE = "Dense vector value missing for a field," + + " use isEmpty() to check for a missing vector value"; + + @Override + public float getMagnitude() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float[] getVector() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public boolean isEmpty() { + return true; + } + + @Override + public int getDims() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public int size() { + return 0; + } + }; +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java new file mode 100644 index 0000000000000..dd4a00fef3af0 --- /dev/null +++ 
b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.script.field.DocValuesField; + +import java.util.Iterator; + +public abstract class DenseVectorDocValuesField implements DocValuesField, DenseVectorScriptDocValues.DenseVectorSupplier { + protected final String name; + + public DenseVectorDocValuesField(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public int size() { + return isEmpty() ? 0 : 1; + } + + @Override + public BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + /** + * Get the DenseVector for a document if one exists, DenseVector.EMPTY otherwise + */ + public abstract DenseVector get(); + + public abstract DenseVector get(DenseVector defaultValue); + + public abstract DenseVectorScriptDocValues getScriptDocValues(); + + // DenseVector fields are single valued, so Iterable does not make sense. + @Override + public Iterator iterator() { + throw new UnsupportedOperationException("Cannot iterate over single valued dense_vector field, use get() instead"); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java index 650ebca1d5ee5..43d04f5ccde7a 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java @@ -10,24 +10,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.ScriptDocValues; -public abstract class DenseVectorScriptDocValues extends ScriptDocValues { - - public interface DenseVectorSupplier extends Supplier { - - @Override - default BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - T getInternal(); - } +public class DenseVectorScriptDocValues extends ScriptDocValues { public static final String MISSING_VECTOR_FIELD_MESSAGE = "A document doesn't have a value for a vector field!"; private final int dims; + protected final DenseVectorSupplier dvSupplier; - public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { + public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { super(supplier); + this.dvSupplier = supplier; this.dims = dims; } @@ -35,60 +27,58 @@ public int dims() { return dims; } + private DenseVector getCheckedVector() { + DenseVector vector = dvSupplier.getInternal(); + if (vector == null) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + return vector; + } + /** * Get dense vector's value as an array of floats */ - public abstract float[] getVectorValue(); + public float[] getVectorValue() { + return getCheckedVector().getVector(); + } /** * Get dense vector's magnitude */ - public abstract float getMagnitude(); + public float getMagnitude() { + return getCheckedVector().getMagnitude(); + } - public abstract double dotProduct(float[] 
queryVector); + public double dotProduct(float[] queryVector) { + return getCheckedVector().dotProduct(queryVector); + } - public abstract double l1Norm(float[] queryVector); + public double l1Norm(float[] queryVector) { + return getCheckedVector().l1Norm(queryVector); + } - public abstract double l2Norm(float[] queryVector); + public double l2Norm(float[] queryVector) { + return getCheckedVector().l2Norm(queryVector); + } @Override public BytesRef get(int index) { throw new UnsupportedOperationException( - "accessing a vector field's value through 'get' or 'value' is not supported!" + "Use 'vectorValue' or 'magnitude' instead!'" + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." ); } - public static DenseVectorScriptDocValues empty(DenseVectorSupplier supplier, int dims) { - return new DenseVectorScriptDocValues(supplier, dims) { - @Override - public float[] getVectorValue() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public float getMagnitude() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double dotProduct(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l1Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l2Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public int size() { - return supplier.size(); - } - }; + @Override + public int size() { + return dvSupplier.getInternal() == null ? 0 : 1; + } + + public interface DenseVectorSupplier extends Supplier { + @Override + default BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + DenseVector getInternal(); } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java index c53d1379dc252..953044c3a5500 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java @@ -19,7 +19,10 @@ public class DocValuesWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles(DocValuesWhitelistExtension.class, "whitelist.txt"); + private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + DocValuesWhitelistExtension.class, + "org.elasticsearch.xpack.vectors.txt" + ); @Override public Map, List> getContextWhitelists() { diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java new file mode 100644 index 0000000000000..1c240892ab2bd --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.VectorUtil; + +import java.util.Arrays; +import java.util.List; + +public class KnnDenseVector implements DenseVector { + protected final float[] docVector; + + public KnnDenseVector(float[] docVector) { + this.docVector = docVector; + } + + @Override + public float[] getVector() { + // we need to copy the value, since {@link VectorValues} can reuse + // the underlying array across documents + return Arrays.copyOf(docVector, docVector.length); + } + + @Override + public float getMagnitude() { + return DenseVector.getMagnitude(docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + return VectorUtil.dotProduct(docVector, queryVector); + } + + @Override + public double dotProduct(List queryVector) { + double dotProduct = 0; + for (int i = 0; i < docVector.length; i++) { + dotProduct += docVector[i] * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector[i]); + } + return result; + } + + @Override + public double l1Norm(List queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector.get(i).floatValue()); + } + return result; + } + + @Override + public double l2Norm(float[] queryVector) { + return Math.sqrt(VectorUtil.squareDistance(docVector, queryVector)); + } + + @Override + public double l2Norm(List queryVector) { + double l2norm = 0; + for (int i = 0; i < docVector.length; i++) { + double diff = docVector[i] - queryVector.get(i).floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return docVector.length; + } + + @Override + public int size() { + return 1; + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java new file mode 100644 index 0000000000000..58b2e60a0fb80 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
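// Hedged worked example (not part of the patch) of the two cosineSimilarity
// flavors on KnnDenseVector above. The boolean flag lets callers that already
// unit-normalized the query vector (ScoreScriptUtils does so in its
// constructor) skip dividing by the query magnitude per document. The class
// name and vector values are illustrative.
class CosineSimilarityExample {
    static void example() {
        DenseVector dv = new KnnDenseVector(new float[] { 3f, 4f }); // |doc| = 5
        float[] rawQuery = new float[] { 6f, 8f };                   // |query| = 10
        float[] unitQuery = new float[] { 0.6f, 0.8f };              // rawQuery / 10

        double normalized = dv.cosineSimilarity(rawQuery, true);      // (3*6 + 4*8) / (10 * 5) = 1.0
        double preNormalized = dv.cosineSimilarity(unitQuery, false); // (3*0.6 + 4*0.8) / 5 = 1.0
        // Both are 1.0 because the vectors are parallel; pre-normalizing just
        // hoists the query-magnitude division out of the per-document hot path.
    }
}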
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.VectorValues; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; + +public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { + protected VectorValues input; // null if no vectors + protected float[] vector; + protected final int dims; + + public KnnDenseVectorDocValuesField(@Nullable VectorValues input, String name, int dims) { + super(name); + this.dims = dims; + this.input = input; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input == null) { + return; + } + int currentDoc = input.docID(); + if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { + vector = null; + } else if (docId == currentDoc) { + vector = input.vectorValue(); + } else { + currentDoc = input.advance(docId); + if (currentDoc == docId) { + vector = input.vectorValue(); + } else { + vector = null; + } + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + public boolean isEmpty() { + return vector == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java deleted file mode 100644 index fc6f1bdb59906..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
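// Hedged trace (not part of the patch) of the setNextDocId bookkeeping above.
// VectorValues is a forward-only iterator, so the field may only advance(),
// never rewind; a docId behind the current position means "no vector here".
// Assuming vectors were indexed for docs 0 and 2 only:
//
//   setNextDocId(0)  ->  docID() == 0, cache vectorValue()         (vector != null)
//   setNextDocId(1)  ->  advance(1) lands on doc 2, 2 != 1         (vector == null)
//   setNextDocId(2)  ->  docID() == 2 already, cache vectorValue() (vector != null)
//   setNextDocId(3)  ->  advance(3) returns NO_MORE_DOCS           (vector == null)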
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.VectorValues; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.VectorUtil; - -import java.io.IOException; -import java.util.Arrays; - -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; - -public class KnnDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class KnnDenseVectorSupplier implements DenseVectorSupplier { - - private final VectorValues in; - private float[] vector; - - public KnnDenseVectorSupplier(VectorValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - int currentDoc = in.docID(); - if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { - vector = null; - } else if (docId == currentDoc) { - vector = in.vectorValue(); - } else { - currentDoc = in.advance(docId); - if (currentDoc == docId) { - vector = in.vectorValue(); - } else { - vector = null; - } - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public float[] getInternal() { - return vector; - } - - @Override - public int size() { - if (vector == null) { - return 0; - } else { - return 1; - } - } - } - - private final KnnDenseVectorSupplier kdvSupplier; - - KnnDenseVectorScriptDocValues(KnnDenseVectorSupplier supplier, int dims) { - super(supplier, dims); - this.kdvSupplier = supplier; - } - - private float[] getVectorChecked() { - if (kdvSupplier.getInternal() == null) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - return kdvSupplier.getInternal(); - } - - @Override - public float[] getVectorValue() { - float[] vector = getVectorChecked(); - // we need to copy the value, since {@link VectorValues} can reuse - // the underlying array across documents - return Arrays.copyOf(vector, vector.length); - } - - @Override - public float getMagnitude() { - float[] vector = getVectorChecked(); - double magnitude = 0.0f; - for (float elem : vector) { - magnitude += elem * elem; - } - return (float) Math.sqrt(magnitude); - } - - @Override - public double dotProduct(float[] queryVector) { - return VectorUtil.dotProduct(getVectorChecked(), queryVector); - } - - @Override - public double l1Norm(float[] queryVector) { - float[] vectorValue = getVectorChecked(); - double result = 0.0; - for (int i = 0; i < queryVector.length; i++) { - result += Math.abs(vectorValue[i] - queryVector[i]); - } - return result; - } - - @Override - public double l2Norm(float[] queryVector) { - return Math.sqrt(VectorUtil.squareDistance(getVectorValue(), queryVector)); - } - - @Override - public int size() { - return supplier.size(); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java index e97daf4c2f397..24e74e4a93958 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java @@ -18,10 +18,10 @@ public class ScoreScriptUtils { public static class DenseVectorFunction { final ScoreScript scoreScript; final float[] queryVector; - final DenseVectorScriptDocValues docValues; + final DenseVectorDocValuesField field; - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field) { - this(scoreScript, queryVector, field, 
false); + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName) { + this(scoreScript, queryVector, fieldName, false); } /** @@ -31,19 +31,10 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St * @param queryVector The query vector. * @param normalizeQuery Whether the provided query should be normalized to unit length. */ - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field, boolean normalizeQuery) { + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName, boolean normalizeQuery) { this.scoreScript = scoreScript; - this.docValues = (DenseVectorScriptDocValues) scoreScript.getDoc().get(field); - - if (docValues.dims() != queryVector.size()) { - throw new IllegalArgumentException( - "The query vector has a different number of dimensions [" - + queryVector.size() - + "] than the document vectors [" - + docValues.dims() - + "]." - ); - } + this.field = (DenseVectorDocValuesField) scoreScript.field(fieldName); + DenseVector.checkDimensions(field.get().getDims(), queryVector.size()); this.queryVector = new float[queryVector.size()]; double queryMagnitude = 0.0; @@ -63,11 +54,11 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St void setNextVector() { try { - docValues.getSupplier().setNextDocId(scoreScript._getDocId()); + field.setNextDocId(scoreScript._getDocId()); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } - if (docValues.size() == 0) { + if (field.isEmpty()) { throw new IllegalArgumentException("A document doesn't have a value for a vector field!"); } } @@ -82,7 +73,7 @@ public L1Norm(ScoreScript scoreScript, List queryVector, String field) { public double l1norm() { setNextVector(); - return docValues.l1Norm(queryVector); + return field.get().l1Norm(queryVector); } } @@ -95,7 +86,7 @@ public L2Norm(ScoreScript scoreScript, List queryVector, String field) { public double l2norm() { setNextVector(); - return docValues.l2Norm(queryVector); + return field.get().l2Norm(queryVector); } } @@ -108,7 +99,7 @@ public DotProduct(ScoreScript scoreScript, List queryVector, String fiel public double dotProduct() { setNextVector(); - return docValues.dotProduct(queryVector); + return field.get().dotProduct(queryVector); } } @@ -121,7 +112,8 @@ public CosineSimilarity(ScoreScript scoreScript, List queryVector, Strin public double cosineSimilarity() { setNextVector(); - return docValues.dotProduct(queryVector) / docValues.getMagnitude(); + // query vector normalized in constructor + return field.get().cosineSimilarity(queryVector, false); } } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java index 1d8c45e9c60c2..a4789543ded43 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java @@ -15,18 +15,12 @@ import org.elasticsearch.Version; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.script.field.DelegateDocValuesField; import org.elasticsearch.script.field.DocValuesField; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; -import 
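// Hedged sketch (not part of the patch) of how the bound classes above are
// driven by the scripting infrastructure; it mirrors DenseVectorFunctionTests
// later in this patch. The class name and query values are illustrative.
import java.util.List;
import org.elasticsearch.script.ScoreScript;

class ScoreScriptUtilsUsageSketch {
    static double cosineForCurrentDoc(ScoreScript scoreScript) {
        // The constructor copies and unit-normalizes the query vector once, so
        // cosineSimilarity() can call cosineSimilarity(queryVector, false) per doc.
        ScoreScriptUtils.CosineSimilarity cosine = new ScoreScriptUtils.CosineSimilarity(
            scoreScript,
            List.of(0.5f, 111.3f, -13.0f, 14.8f, -156.0f),
            "vector"
        );
        return cosine.cosineSimilarity(); // ~0.790 against the doc vector used in the tests
    }
}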
org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.DenseVectorSupplier; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; import java.util.Collection; import java.util.Collections; -import static org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE; - final class VectorDVLeafFieldData implements LeafFieldData { private final LeafReader reader; @@ -63,31 +57,15 @@ public DocValuesField getScriptField(String name) { try { if (indexed) { VectorValues values = reader.getVectorValues(field); - if (values == null || values == VectorValues.EMPTY) { - return new DelegateDocValuesField(DenseVectorScriptDocValues.empty(new DenseVectorSupplier() { - @Override - public float[] getInternal() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public void setNextDocId(int docId) throws IOException { - // do nothing - } - - @Override - public int size() { - return 0; - } - }, dims), name); + if (values == VectorValues.EMPTY) { + // There's no way for KnnDenseVectorDocValuesField to reliably differentiate between VectorValues.EMPTY and + // values that can be iterated through. Since VectorValues.EMPTY throws on docID(), pass a null instead. + values = null; } - return new DelegateDocValuesField(new KnnDenseVectorScriptDocValues(new KnnDenseVectorSupplier(values), dims), name); + return new KnnDenseVectorDocValuesField(values, name, dims); } else { BinaryDocValues values = DocValues.getBinary(reader, field); - return new DelegateDocValuesField( - new BinaryDenseVectorScriptDocValues(new BinaryDenseVectorSupplier(values), indexVersion, dims), - name - ); + return new BinaryDenseVectorDocValuesField(values, name, dims, indexVersion); } } catch (IOException e) { throw new IllegalStateException("Cannot load doc values for vector field!", e); diff --git a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt similarity index 52% rename from x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt rename to x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt index 86583d77264a2..bcf989933b04e 100644 --- a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt +++ b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt @@ -11,6 +11,43 @@ class org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues { class org.elasticsearch.script.ScoreScript @no_import { } +class org.elasticsearch.xpack.vectors.query.DenseVector { + DenseVector EMPTY + float getMagnitude() + + # handle List and float[] arguments + double dotProduct(Object) + double l1Norm(Object) + double l2Norm(Object) + double cosineSimilarity(Object) + + float[] getVector() + boolean isEmpty() + int getDims() + int size() +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.BinaryDenseVector { +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.KnnDenseVector { +} + +class org.elasticsearch.xpack.vectors.query.DenseVectorDocValuesField { + DenseVector get() + DenseVector get(DenseVector) +} + +# implementation of DenseVectorDocValuesField +class 
org.elasticsearch.xpack.vectors.query.KnnDenseVectorDocValuesField { +} + +# implementation of DenseVectorDocValuesField +class org.elasticsearch.xpack.vectors.query.BinaryDenseVectorDocValuesField { +} + static_import { double l1norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L1Norm double l2norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L2Norm diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java index 2761364e51505..ddd96ba9fd0a7 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import java.io.IOException; import java.nio.ByteBuffer; @@ -29,24 +28,56 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(vectors, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, indexVersion, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + Version indexVersion = Version.CURRENT; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + BinaryDocValues docValues = wrap(vectors, indexVersion); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, 
Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); + assertEquals(0, field.size()); Exception e = expectThrows(IllegalArgumentException.class, scriptDocValues::getVectorValue); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -58,12 +89,17 @@ public void testGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -73,10 +109,10 @@ public void testSimilarityFunctions() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(new float[][] { docVector }, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); assertEquals( "dotProduct result is not equal to the expected value!", @@ -133,7 +169,7 @@ public long cost() { }; } - private static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { + static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { byte[] bytes = indexVersion.onOrAfter(Version.V_7_5_0) ? 
new byte[VectorEncoderDecoder.INT_BYTES * values.length + VectorEncoderDecoder.INT_BYTES] : new byte[VectorEncoderDecoder.INT_BYTES * values.length]; diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java index 0ecd26f08c20c..d40d7e3abd663 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java @@ -7,18 +7,16 @@ package org.elasticsearch.xpack.vectors.query; -import org.apache.lucene.index.BinaryDocValues; import org.elasticsearch.Version; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.CosineSimilarity; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.DotProduct; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L1Norm; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L2Norm; +import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -28,34 +26,72 @@ public class DenseVectorFunctionTests extends ESTestCase { - public void testVectorFunctions() { - String field = "vector"; + public void testVectorClassBindings() throws IOException { + String fieldName = "vector"; int dims = 5; float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; List queryVector = Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f); List invalidQueryVector = Arrays.asList(0.5, 111.3); - for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { - BinaryDocValues docValues = BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, indexVersion); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues( - new BinaryDenseVectorSupplier(docValues), - indexVersion, - dims - ); + List fields = List.of( + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.V_7_4_0), + "test", + dims, + Version.V_7_4_0 + ), + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.CURRENT), + "test", + dims, + Version.CURRENT + ), + new KnnDenseVectorDocValuesField(KnnDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }), "test", dims) + ); + for (DenseVectorDocValuesField field : fields) { + field.setNextDocId(0); ScoreScript scoreScript = mock(ScoreScript.class); - when(scoreScript.getDoc()).thenReturn(Collections.singletonMap(field, scriptDocValues)); + when(scoreScript.field("vector")).thenAnswer(mock -> field); // Test cosine similarity explicitly, as it must perform special logic on top of the doc values - CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, field); - assertEquals("cosineSimilarity result is not equal to the expected value!", 0.790, function.cosineSimilarity(), 0.001); + CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, fieldName); + float cosineSimilarityExpected = 0.790f; + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + 
function.cosineSimilarity(), + 0.001 + ); + + // Test normalization for cosineSimilarity + float[] queryVectorArray = new float[queryVector.size()]; + for (int i = 0; i < queryVectorArray.length; i++) { + queryVectorArray[i] = queryVector.get(i).floatValue(); + } + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + field.getInternal().cosineSimilarity(queryVectorArray, true), + 0.001 + ); // Check each function rejects query vectors with the wrong dimension - assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, field)); + assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, fieldName)); + + // Check scripting infrastructure integration + DotProduct dotProduct = new DotProduct(scoreScript, queryVector, fieldName); + assertEquals(65425.6249, dotProduct.dotProduct(), 0.001); + assertEquals(485.1837, new L1Norm(scoreScript, queryVector, fieldName).l1norm(), 0.001); + assertEquals(301.3614, new L2Norm(scoreScript, queryVector, fieldName).l2norm(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, dotProduct::dotProduct); + assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); } + } private void assertDimensionMismatch(Supplier supplier) { diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java new file mode 100644 index 0000000000000..11078e4964920 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
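// Hedged arithmetic check (not part of the patch) for the constants asserted
// in DenseVectorFunctionTests above, using its docVector and queryVector:
//
//   dotProduct = 230.0*0.5 + 300.33*111.3 + (-34.8988)*(-13.0)
//              + 15.555*14.8 + (-200.0)*(-156.0)
//              = 115 + 33426.729 + 453.6844 + 230.214 + 31200
//              = 65425.6274 in exact decimals; the float-precision result the
//                test asserts is 65425.6249, within its 0.001 tolerance.
//
//   cosineSimilarity = dotProduct / (|query| * |doc|)
//                    ~ 65425.63 / (192.645 * 429.602) ~ 0.790,
//   matching cosineSimilarityExpected.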
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; + +public class DenseVectorTests extends ESTestCase { + public void testBadVectorType() { + DenseVector knn = new KnnDenseVector(new float[] { 1.0f, 2.0f, 3.5f }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> knn.dotProduct(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l1Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l2Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.cosineSimilarity(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + } + + public void testFloatVsListQueryVector() { + int dims = randomIntBetween(1, 16); + float[] docVector = new float[dims]; + float[] arrayQV = new float[dims]; + List listQV = new ArrayList<>(dims); + for (int i = 0; i < docVector.length; i++) { + docVector[i] = randomFloat(); + float q = randomFloat(); + arrayQV[i] = q; + listQV.add(q); + } + + KnnDenseVector knn = new KnnDenseVector(docVector); + assertEquals(knn.dotProduct(arrayQV), knn.dotProduct(listQV), 0.001f); + assertEquals(knn.dotProduct((Object) listQV), knn.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(knn.l1Norm(arrayQV), knn.l1Norm(listQV), 0.001f); + assertEquals(knn.l1Norm((Object) listQV), knn.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.l2Norm(arrayQV), knn.l2Norm(listQV), 0.001f); + assertEquals(knn.l2Norm((Object) listQV), knn.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); + assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); + + for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { + BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, indexVersion); + BinaryDenseVector bdv = new BinaryDenseVector(value, dims, indexVersion); + + assertEquals(bdv.dotProduct(arrayQV), bdv.dotProduct(listQV), 0.001f); + assertEquals(bdv.dotProduct((Object) listQV), bdv.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(bdv.l1Norm(arrayQV), bdv.l1Norm(listQV), 0.001f); + assertEquals(bdv.l1Norm((Object) listQV), bdv.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.l2Norm(arrayQV), bdv.l2Norm(listQV), 0.001f); + assertEquals(bdv.l2Norm((Object) listQV), bdv.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.cosineSimilarity(arrayQV), bdv.cosineSimilarity(listQV), 0.001f); + assertEquals(bdv.cosineSimilarity((Object) listQV), bdv.cosineSimilarity((Object) arrayQV), 0.001f); + } + } + 
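// Hedged sketch (not part of the patch) of the byte layout consumed by
// BinaryDenseVector in the test above and produced by mockEncodeDenseVector:
// float components back to back, with the precomputed magnitude appended as a
// trailing float on 7.5.0+ indices. This matches the byte[] sizing shown in
// BinaryDenseVectorScriptDocValuesTests, but treat the exact layout as an
// assumption; the class name is illustrative.
import java.nio.ByteBuffer;
import org.apache.lucene.util.BytesRef;

class DenseVectorEncodingSketch {
    static BytesRef encode(float[] values, boolean withMagnitude) {
        ByteBuffer buffer = ByteBuffer.allocate(Float.BYTES * values.length + (withMagnitude ? Float.BYTES : 0));
        for (float v : values) {
            buffer.putFloat(v);                                // 4 bytes per component
        }
        if (withMagnitude) {
            buffer.putFloat(DenseVector.getMagnitude(values)); // trailing magnitude, 7.5.0+
        }
        return new BytesRef(buffer.array());
    }
}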
+} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java index 7005e4d7bd531..743fc2d8bb63e 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.index.VectorValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; @@ -23,22 +22,52 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + KnnDenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + assertEquals(1, field.size()); + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); Exception e = expectThrows(IllegalArgumentException.class, () -> scriptDocValues.getVectorValue()); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -49,12 +78,17 @@ public void testMissingValues() throws IOException { public void testGetFunctionIsNotAccessible() throws IOException { int 
dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -62,16 +96,30 @@ public void testSimilarityFunctions() throws IOException { float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; float[] queryVector = new float[] { 0.5f, 111.3f, -13.0f, 14.8f, -156.0f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(new float[][] { docVector })); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); - supplier.setNextDocId(0); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(new float[][] { docVector }), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); + field.setNextDocId(0); assertEquals("dotProduct result is not equal to the expected value!", 65425.624, scriptDocValues.dotProduct(queryVector), 0.001); assertEquals("l1norm result is not equal to the expected value!", 485.184, scriptDocValues.l1Norm(queryVector), 0.001); assertEquals("l2norm result is not equal to the expected value!", 301.361, scriptDocValues.l2Norm(queryVector), 0.001); } - private static VectorValues wrap(float[][] vectors) { + public void testMissingVectorValues() throws IOException { + int dims = 7; + KnnDenseVectorDocValuesField emptyKnn = new KnnDenseVectorDocValuesField(null, "test", dims); + + emptyKnn.setNextDocId(0); + assertEquals(0, emptyKnn.getScriptDocValues().size()); + assertTrue(emptyKnn.getScriptDocValues().isEmpty()); + assertEquals(DenseVector.EMPTY, emptyKnn.get()); + assertNull(emptyKnn.get(null)); + assertNull(emptyKnn.getInternal()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, emptyKnn::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + + static VectorValues wrap(float[][] vectors) { return new VectorValues() { int index = 0; diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml index 4cdf66d749aa7..28cfa43fe8abe 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml @@ -8,25 +8,25 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { level: 0 } - do: index: index: test_1 - id: 2 + id: "2" body: { level: 0 } - do: index: index: test_1 - id: 3 + id: "3" body: { level: 0 } - do: index: index: test_1 - id: 4 + id: "4" body: { 
level: 1 } - do: indices.refresh: {} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml index 954f0d3e3f214..842a340be1bd5 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml @@ -7,7 +7,7 @@ setup: - do: index: index: idx - id: 1 + id: "1" body: > { "date" : "2015-01-01T00:00:00", @@ -16,7 +16,7 @@ setup: - do: index: index: idx - id: 2 + id: "2" body: > { "date" : "2015-01-02T00:00:00", @@ -25,7 +25,7 @@ setup: - do: index: index: idx - id: 3 + id: "3" body: > { "date" : "2015-01-03T00:00:00", @@ -34,7 +34,7 @@ setup: - do: index: index: idx - id: 4 + id: "4" body: > { "date" : "2015-01-04T00:00:00", diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml index 91b76e5c66eae..8fd23c455bec4 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml @@ -7,7 +7,7 @@ setup: - do: index: index: idx - id: 1 + id: "1" body: > { "date" : "2015-01-01T00:00:00", @@ -16,7 +16,7 @@ setup: - do: index: index: idx - id: 2 + id: "2" body: > { "date" : "2015-01-02T00:00:00", @@ -25,7 +25,7 @@ setup: - do: index: index: idx - id: 3 + id: "3" body: > { "date" : "2015-01-03T00:00:00", @@ -34,7 +34,7 @@ setup: - do: index: index: idx - id: 4 + id: "4" body: > { "date" : "2015-01-04T00:00:00", diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml index 0ed3cfe04480f..c2da572490c68 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml @@ -3,7 +3,7 @@ - do: index: index: - id: 1 + id: "1" refresh: true body: { foo: bar } diff --git a/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml b/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml index b50f20afd0358..19bf7b940a662 100644 --- a/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml +++ b/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml @@ -8,7 +8,7 @@ setup: - do: index: index: my_test_index - id: 1 + id: "1" refresh: true body: > { @@ -333,7 +333,7 @@ teardown: - do: get: index: my_test_index - id: 1 + id: "1" - match: { _id: "1" } @@ -382,6 +382,6 @@ teardown: - do: get: index: index_not_allowed_to_read - id: 1 + id: "1" catch: forbidden diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java index cf4a178ba85fa..97f47e13abb7d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -75,6 +76,10 @@ protected void masterOperation( ActionListener listener ) { if (enabled) { + ActionListener preservingListener = ContextPreservingActionListener.wrapPreservingContext( + listener, + client.threadPool().getThreadContext() + ); try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(WATCHER_ORIGIN)) { WatcherStatsRequest statsRequest = new WatcherStatsRequest(); statsRequest.includeStats(true); @@ -91,8 +96,8 @@ protected void masterOperation( true, mergedCounters.toNestedMap() ); - listener.onResponse(new XPackUsageFeatureResponse(usage)); - }, listener::onFailure)); + preservingListener.onResponse(new XPackUsageFeatureResponse(usage)); + }, preservingListener::onFailure)); } } else { WatcherFeatureSetUsage usage = new WatcherFeatureSetUsage( diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java index 77dc361f4c5ab..0425206f224da 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -43,6 +45,10 @@ public class WatcherSearchTemplateRequest implements ToXContentObject { private final BytesReference searchSource; private boolean restTotalHitsAsInt = true; + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(WatcherSearchTemplateRequest.class); + static final String TYPES_DEPRECATION_MESSAGE = + "[types removal] Specifying empty types array in a watcher search request is deprecated."; + public WatcherSearchTemplateRequest( String[] indices, SearchType searchType, @@ -190,6 +196,17 @@ public static WatcherSearchTemplateRequest fromXContent(XContentParser parser, S ); } } + } else if (TYPES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + // Tolerate an empty types array, because some watches created internally in 6.x have + // an empty types array in their search, and it's clearly equivalent to typeless. + if (parser.nextToken() != XContentParser.Token.END_ARRAY) { + throw new ElasticsearchParseException( + "could not read search request. 
unsupported non-empty array field [" + currentFieldName + "]" + ); + } + // Empty types arrays still generate the same deprecation warning they did in 7.x. + // Ideally they should be removed from the definition. + deprecationLogger.critical(DeprecationCategory.PARSING, "watcher_search_input", TYPES_DEPRECATION_MESSAGE); } else { throw new ElasticsearchParseException( "could not read search request. unexpected array field [" + currentFieldName + "]" @@ -272,6 +289,7 @@ public int hashCode() { } private static final ParseField INDICES_FIELD = new ParseField("indices"); + private static final ParseField TYPES_FIELD = new ParseField("types"); private static final ParseField BODY_FIELD = new ParseField("body"); private static final ParseField SEARCH_TYPE_FIELD = new ParseField("search_type"); private static final ParseField INDICES_OPTIONS_FIELD = new ParseField("indices_options"); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java index 0e71ee68c878b..94e1edcace189 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -69,6 +70,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); watch.status().setHeaders(filteredHeaders); try (XContentBuilder builder = jsonBuilder()) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java index 005a089298777..620580ee09824 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java @@ -6,15 +6,18 @@ */ package org.elasticsearch.xpack.watcher.support.search; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.List; import java.util.Map; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -32,6 +35,49 @@ public void testFromXContentWithTemplateCustomLang() throws IOException { assertTemplate(source, "custom-script", "painful", 
singletonMap("bar", "baz")); } + public void testFromXContentWithEmptyTypes() throws IOException { + String source = """ + { + "search_type" : "query_then_fetch", + "indices" : [ ".ml-anomalies-*" ], + "types" : [ ], + "body" : { + "query" : { + "bool" : { + "filter" : [ { "term" : { "job_id" : "my-job" } }, { "range" : { "timestamp" : { "gte" : "now-30m" } } } ] + } + } + } + } + """; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + parser.nextToken(); + WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())); + assertThat(result.getIndices(), arrayContaining(".ml-anomalies-*")); + } + } + + public void testFromXContentWithNonEmptyTypes() throws IOException { + String source = """ + { + "search_type" : "query_then_fetch", + "indices" : [ "my-index" ], + "types" : [ "my-type" ], + "body" : { + "query" : { "match_all" : {} } + } + } + """; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + parser.nextToken(); + ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())) + ); + assertThat(e.getMessage(), is("could not read search request. unsupported non-empty array field [types]")); + } + } + public void testDefaultHitCountsDefaults() throws IOException { assertHitCount("{}", true); } @@ -61,4 +107,8 @@ private void assertTemplate(String source, String expectedScript, String expecte assertThat(result.getTemplate().getParams(), equalTo(expectedParams)); } } + + protected List filteredWarnings() { + return List.of(WatcherSearchTemplateRequest.TYPES_DEPRECATION_MESSAGE); + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java index 770ca44137701..d3a16585ac9ac 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java @@ -6,11 +6,15 @@ */ package org.elasticsearch.xpack.watcher.transport.actions; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.Index; @@ -21,6 +25,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; 
import org.elasticsearch.xpack.watcher.ClockHolder; @@ -31,6 +39,7 @@ import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; @@ -75,6 +84,13 @@ public void setupAction() throws Exception { return null; }).when(client).execute(any(), any(), any()); + final ClusterService clusterService = mock(ClusterService.class); + final ClusterState clusterState = mock(ClusterState.class); + final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); + when(clusterService.state()).thenReturn(clusterState); + when(clusterState.nodes()).thenReturn(discoveryNodes); + when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT); + action = new TransportPutWatchAction( transportService, threadPool, @@ -82,7 +98,8 @@ public void setupAction() throws Exception { new ClockHolder(new ClockMock()), TestUtils.newTestLicenseState(), parser, - client + client, + clusterService ); } @@ -90,7 +107,14 @@ public void setupAction() throws Exception { public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { // set up threadcontext with some arbitrary info String headerName = randomFrom(ClientHelper.SECURITY_HEADER_FILTERS); - threadContext.putHeader(headerName, randomAlphaOfLength(10)); + if (Set.of(AuthenticationField.AUTHENTICATION_KEY, SecondaryAuthentication.THREAD_CTX_KEY).contains(headerName)) { + threadContext.putHeader( + headerName, + Authentication.newRealmAuthentication(new User("dummy"), new Authentication.RealmRef("name", "type", "node")).encode() + ); + } else { + threadContext.putHeader(headerName, randomAlphaOfLength(10)); + } threadContext.putHeader(randomAlphaOfLength(10), "doesntmatter"); PutWatchRequest putWatchRequest = new PutWatchRequest(); diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index e8fdffe47c01f..552dd2bc94f02 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' @@ -31,7 +32,6 @@ testClusters.matching { it.name == "integTest" }.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'xpack.security.autoconfiguration.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 34fdd71678349..3923d439d394d 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -60,9 +61,8 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' setting 'xpack.security.authc.api_key.enabled', 'true' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - 
systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) { @@ -83,9 +83,6 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> useCluster baseCluster dependsOn "${baseName}#oldClusterTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().goToNextVersion() if (bwcVersion.before(BuildParams.bwcVersions.minimumWireCompatibleVersion)) { // When doing a full cluster restart of older versions we actually have to upgrade twice. First to 7.last, then to the current version. diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml index e63cdefa0bee4..6bcb8c1127394 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -27,7 +27,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -59,7 +59,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -91,7 +91,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -112,7 +112,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,12 +132,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: source - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -177,12 +177,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -226,7 +226,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -278,7 +278,7 @@ setup: - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -298,7 +298,7 @@ setup: - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -318,7 +318,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml index 773c8db88991c..06bb898a99944 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml @@ -6,7 +6,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -41,7 +41,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -87,7 +87,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,7 
+132,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -166,12 +166,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: source - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -226,12 +226,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -289,7 +289,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -356,7 +356,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -391,7 +391,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml index 8512e4e6308b1..c2177cce77c52 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -37,7 +37,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -68,7 +68,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -99,7 +99,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -116,7 +116,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -133,12 +133,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -185,7 +185,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -202,7 +202,7 @@ setup: - do: get: index: source - id: 1 + id: "1" # These were visible to the user running the update_by_query so they stayed. 
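
Most of the YAML churn in this part of the change simply quotes numeric document IDs (`id: 1` becomes `id: "1"`). The likely reason is YAML scalar typing: an unquoted `1` loads as an integer, while the `_id` values the REST API returns are always strings, so quoting keeps the fixture and the response the same type under stricter comparison. A small illustration (SnakeYAML is used here purely for demonstration; it is not part of this change):

```java
import org.yaml.snakeyaml.Yaml;

import java.util.Map;

public class YamlScalarTypes {
    public static void main(String[] args) {
        Yaml yaml = new Yaml();
        // Unquoted scalars are typed by the YAML parser...
        Map<String, Object> unquoted = yaml.load("id: 1");
        // ...while quoting forces a string, matching REST _id values.
        Map<String, Object> quoted = yaml.load("id: \"1\"");
        System.out.println(unquoted.get("id").getClass().getSimpleName()); // Integer
        System.out.println(quoted.get("id").getClass().getSimpleName());   // String
    }
}
```
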
- match: { _source.foo: z } - match: { _source.bar: z } diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml index 827bd8364c073..64591ec47bfba 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -32,7 +32,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -58,7 +58,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -84,7 +84,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -110,7 +110,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -136,12 +136,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "hidden": false } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -204,7 +204,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 54e6958c58ac3..fc2e96ec66045 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -18,6 +18,7 @@ import org.gradle.api.internal.artifacts.ArtifactAttributes apply plugin: 'elasticsearch.jdk-download' apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-resources' configurations { oldesFixture @@ -37,6 +38,15 @@ jdks { } } +restResources { + restApi { + include '_common', 'search' + } + restTests { + includeCore 'search/390_doc_values_search.yml' + } +} + if (Os.isFamily(Os.FAMILY_WINDOWS)) { logger.warn("Disabling repository-old-versions tests because we can't get the pid file on windows") tasks.named("testingConventions").configure { enabled = false } @@ -48,8 +58,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { * To avoid testing against too many old versions, always pick first and last version per major */ project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); for (String versionString : ['5.0.0', '5.6.16', '6.0.0', '6.8.20']) { @@ -60,7 +70,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { String configName = "es${versionNoDots}" def config = configurations.create(configName) - config.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + config.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, 
ArtifactTypeDefinition.DIRECTORY_TYPE); dependencies.add(configName, artifact) String repoLocation = "${buildDir}/cluster/shared/repo/${versionNoDots}" @@ -91,15 +101,21 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { if (Architecture.current() == Architecture.AARCH64) { env 'ES_JAVA_OPTS', '-Xss512k' } + def dataPath = "${baseDir}/data" args 'oldes.OldElasticsearch', baseDir, "${ -> config.getSingleFile().toPath()}", false, - "path.repo: ${repoLocation}" + "path.repo: ${repoLocation}", + "path.data: ${dataPath}" if (version.onOrAfter('6.8.0') && Architecture.current() == Architecture.AARCH64) { // We need to explicitly disable ML when running old ES versions on ARM args 'xpack.ml.enabled: false' } + doFirst { + delete(dataPath) + mkdir(dataPath) + } maxWaitInSeconds 60 waitCondition = { fixture, ant -> // the fixture writes the ports file when Elasticsearch's HTTP service diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java new file mode 100644 index 0000000000000..ab1105d989ff1 --- /dev/null +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.oldrepos; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.http.HttpHost; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.junit.Before; + +import java.io.IOException; + +/** + * Tests doc-value-based searches against indices imported from clusters older than N-1. + * We reuse the YAML tests in search/390_doc_values_search.yml but have to do the setup + * manually here as the setup is done on the old cluster for which we have to use the + * low-level REST client instead of the YAML set up that only knows how to talk to + * newer ES versions. + * + * We mimic the setup in search/390_doc_values_search.yml here, but adapt it to work + * against older version clusters. 
+ */ +public class DocValueOnlyFieldsIT extends ESClientYamlSuiteTestCase { + + final Version oldVersion = Version.fromString(System.getProperty("tests.es.version")); + static boolean setupDone; + + public DocValueOnlyFieldsIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + protected boolean skipSetupSections() { + // setup in the YAML file is replaced by the method below + return true; + } + + @Before + public void setupIndex() throws IOException { + final boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); + if (afterRestart) { + return; + } + + // The following is a bit of a hack. While we wish we could make this an @BeforeClass, it does not work because the client() is only + // initialized later, so we do it when running the first test + if (setupDone) { + return; + } + + setupDone = true; + + String repoLocation = PathUtils.get(System.getProperty("tests.repo.location")) + .resolve(RandomizedTest.getContext().getTargetClass().getName()) + .toString(); + + String indexName = "test"; + String repoName = "doc_values_repo"; + String snapshotName = "snap"; + String[] basicTypes = new String[] { + "byte", + "double", + "float", + "half_float", + "integer", + "long", + "short", + "boolean", + "keyword", + "ip", + "geo_point" }; // date is manually added as it needs further configuration + + int oldEsPort = Integer.parseInt(System.getProperty("tests.es.port")); + try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { + Request createIndex = new Request("PUT", "/" + indexName); + int numberOfShards = randomIntBetween(1, 3); + + boolean multiTypes = oldVersion.before(Version.V_7_0_0); + + XContentBuilder settingsBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("settings") + .field("index.number_of_shards", numberOfShards) + .endObject() + .startObject("mappings"); + if (multiTypes) { + settingsBuilder.startObject("doc"); + } + settingsBuilder.field("dynamic", false).startObject("properties"); + for (String type : basicTypes) { + settingsBuilder.startObject(type).field("type", type).endObject(); + } + settingsBuilder.startObject("date").field("type", "date").field("format", "yyyy/MM/dd").endObject(); + if (multiTypes) { + settingsBuilder.endObject(); + } + settingsBuilder.endObject().endObject().endObject(); + + createIndex.setJsonEntity(Strings.toString(settingsBuilder)); + assertOK(oldEs.performRequest(createIndex)); + + Request doc1 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "1"); + doc1.addParameter("refresh", "true"); + XContentBuilder bodyDoc1 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 1) + .field("double", 1.0) + .field("float", 1.0) + .field("half_float", 1.0) + .field("integer", 1) + .field("long", 1) + .field("short", 1) + .field("date", "2017/01/01") + .field("keyword", "key1") + .field("boolean", false) + .field("ip", "192.168.0.1") + .array("geo_point", 13.5, 34.89) + .endObject(); + doc1.setJsonEntity(Strings.toString(bodyDoc1));
assertOK(oldEs.performRequest(doc1)); + + Request doc2 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "2"); + doc2.addParameter("refresh", "true"); + XContentBuilder bodyDoc2 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 2) + .field("double", 2.0) + .field("float", 2.0) + .field("half_float", 2.0) + .field("integer", 2) + .field("long", 2) + .field("short", 2) + .field("date", "2017/01/02") + .field("keyword", "key2") + .field("boolean", true) + .field("ip", "192.168.0.2") + .array("geo_point", -63.24, 31.0) + .endObject(); + doc2.setJsonEntity(Strings.toString(bodyDoc2)); + assertOK(oldEs.performRequest(doc2)); + + // register repo on old ES and take snapshot + Request createRepoRequest = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s"}} + """.formatted(repoLocation)); + assertOK(oldEs.performRequest(createRepoRequest)); + + Request createSnapshotRequest = new Request("PUT", "/_snapshot/" + repoName + "/" + snapshotName); + createSnapshotRequest.addParameter("wait_for_completion", "true"); + createSnapshotRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(oldEs.performRequest(createSnapshotRequest)); + } + + // register repo on new ES and restore snapshot + Request createRepoRequest2 = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest2.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s","allow_bwc_indices":true}} + """.formatted(repoLocation)); + assertOK(client().performRequest(createRepoRequest2)); + + final Request createRestoreRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_restore"); + createRestoreRequest.addParameter("wait_for_completion", "true"); + createRestoreRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(client().performRequest(createRestoreRequest)); + + // add mappings (they will be auto-converted later) + Request putMappingsRequest = new Request("PUT", "/" + indexName + "/_mappings"); + XContentBuilder mappingsBuilder = XContentFactory.jsonBuilder().startObject().startObject("properties"); + for (String type : basicTypes) { + mappingsBuilder.startObject(type).field("type", type).field("index", false).endObject(); + } + mappingsBuilder.startObject("date").field("type", "date").field("index", false).field("format", "yyyy/MM/dd").endObject(); + mappingsBuilder.endObject().endObject(); + putMappingsRequest.setJsonEntity(Strings.toString(mappingsBuilder)); + assertOK(client().performRequest(putMappingsRequest)); + } +} diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index a2e12e6046f06..0f77bfb8ee964 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -8,9 +8,7 @@ package org.elasticsearch.oldrepos; import org.apache.http.HttpHost; -import org.elasticsearch.Build; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; @@ 
-29,7 +27,6 @@ import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.searchable_snapshots.MountSnapshotRequest; import org.elasticsearch.cluster.SnapshotsInProgress; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.routing.Murmur3HashFunction; @@ -133,11 +130,9 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { } private void afterRestart(String indexName) throws IOException { - if (Build.CURRENT.isSnapshot()) { - ensureGreen("restored_" + indexName); - ensureGreen("mounted_full_copy_" + indexName); - ensureGreen("mounted_shared_cache_" + indexName); - } + ensureGreen("restored_" + indexName); + ensureGreen("mounted_full_copy_" + indexName); + ensureGreen("mounted_shared_cache_" + indexName); } @SuppressWarnings("removal") @@ -209,9 +204,7 @@ private void beforeRestart( if (sourceOnlyRepository) { repoSettingsBuilder.put("delegate_type", "fs"); } - if (Build.CURRENT.isSnapshot()) { - repoSettingsBuilder.put("allow_bwc_indices", true); - } + repoSettingsBuilder.put("allow_bwc_indices", true); ElasticsearchAssertions.assertAcked( client.snapshot() .createRepository( @@ -265,48 +258,42 @@ private void beforeRestart( assertThat(snapshotStatus.getStats().getTotalSize(), greaterThan(0L)); assertThat(snapshotStatus.getStats().getTotalFileCount(), greaterThan(0)); - if (Build.CURRENT.isSnapshot()) { - // restore / mount and check whether searches work - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + // restore / mount and check whether searches work + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); - // close indices - assertTrue( - client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); + // close indices + assertTrue(client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged()); + assertTrue( + client.indices().close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() + ); + assertTrue( + client.indices() + .close(new CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) + .isShardsAcknowledged() + ); - // restore / mount again - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); - } + // restore / mount again + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); } private String getType(Version oldVersion, String id) { @@ -342,15 +329,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, restoreSnapshotResponse.getRestoreInfo().totalShards()); assertEquals(numberOfShards, 
restoreSnapshotResponse.getRestoreInfo().successfulShards()); - assertEquals( - ClusterHealthStatus.GREEN, - client.cluster() - .health( - new ClusterHealthRequest("restored_" + indexName).waitForGreenStatus().waitForNoRelocatingShards(true), - RequestOptions.DEFAULT - ) - .getStatus() - ); + ensureGreen("restored_" + indexName); MappingMetadata mapping = client.indices() .getMapping(new GetMappingsRequest().indices("restored_" + indexName), RequestOptions.DEFAULT) @@ -401,15 +380,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, mountSnapshotResponse.getRestoreInfo().totalShards()); assertEquals(numberOfShards, mountSnapshotResponse.getRestoreInfo().successfulShards()); - assertEquals( - ClusterHealthStatus.GREEN, - client.cluster() - .health( - new ClusterHealthRequest("mounted_full_copy_" + indexName).waitForGreenStatus().waitForNoRelocatingShards(true), - RequestOptions.DEFAULT - ) - .getStatus() - ); + ensureGreen("mounted_full_copy_" + indexName); // run a search against the index assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion); diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 93a9a99ce3e3f..a6db46c9d0d10 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -41,6 +41,13 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> versions = [oldVersion, project.version] numberOfNodes = 3 + systemProperty 'ingest.geoip.downloader.enabled.default', 'true' + //we don't want to hit real service from each test + systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint' + if (bwcVersion.onOrAfter('7.14.0')) { + setting 'ingest.geoip.downloader.endpoint', 'http://invalid.endpoint' + } + setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "['${buildDir}/cluster/shared/repo/${baseName}', '${searchableSnapshotRepository}']" setting 'xpack.license.self_generated.type', 'trial' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java new file mode 100644 index 0000000000000..3dedd041d6465 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.upgrades; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.hamcrest.Matchers; + +import java.nio.charset.StandardCharsets; + +public class GeoIpUpgradeIT extends AbstractUpgradeTestCase { + + public void testGeoIpDownloader() throws Exception { + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_cat/tasks")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("geoip-downloader")); + }); + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_ingest/geoip/stats")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("failed_downloads\":1")); + }); + } + } +} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index e1a1620f37771..73aee56bc6162 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -8,47 +8,20 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.client.MachineLearningClient; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.CloseJobResponse; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.FlushJobResponse; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobResponse; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsResponse; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.OpenJobResponse; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PostDataResponse; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutJobResponse; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.client.ml.job.config.DataDescription; -import org.elasticsearch.client.ml.job.config.Detector; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import java.io.IOException; import 
java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -60,6 +33,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -68,14 +42,6 @@ public class MlJobSnapshotUpgradeIT extends AbstractUpgradeTestCase { private static final String JOB_ID = "ml-snapshots-upgrade-job"; - private static class HLRC extends RestHighLevelClient { - HLRC(RestClient restClient) { - super(restClient, RestClient::close, new ArrayList<>()); - } - } - - private MachineLearningClient hlrc; - @Override protected Collection<String> templatesToWaitFor() { // We shouldn't wait for ML templates during the upgrade - production won't @@ -95,7 +61,6 @@ protected static void waitForPendingUpgraderTasks() throws Exception { * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade */ public void testSnapshotUpgrader() throws Exception { - hlrc = new HLRC(client()).machineLearning(); Request adjustLoggingLevels = new Request("PUT", "/_cluster/settings"); adjustLoggingLevels.setJsonEntity(""" {"persistent": {"logger.org.elasticsearch.xpack.ml": "trace"}}"""); @@ -125,57 +90,51 @@ public void testSnapshotUpgrader() throws Exception { } } + @SuppressWarnings("unchecked") private void testSnapshotUpgradeFailsOnMixedCluster() throws Exception { - Job job = getJob(JOB_ID).jobs().get(0); - String currentSnapshot = job.getModelSnapshotId(); - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - - ModelSnapshot snapshot = modelSnapshots.snapshots() - .stream() - .filter(s -> s.getSnapshotId().equals(currentSnapshot) == false) + Map<String, Object> jobs = entityAsMap(getJob(JOB_ID)); + + String currentSnapshot = ((List<String>) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0); + Response getResponse = getModelSnapshots(JOB_ID); + List<Map<String, Object>> snapshots = (List<Map<String, Object>>) entityAsMap(getResponse).get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + + Map<String, Object> snapshot = snapshots.stream() + .filter(s -> s.get("snapshot_id").equals(currentSnapshot) == false) .findFirst() .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshot)); - Exception ex = expectThrows( - Exception.class, - () -> hlrc.upgradeJobSnapshot( - new UpgradeJobModelSnapshotRequest(JOB_ID, snapshot.getSnapshotId(), null, true), - RequestOptions.DEFAULT - ) - ); + Exception ex = expectThrows(Exception.class, () -> upgradeJobSnapshot(JOB_ID, (String) snapshot.get("snapshot_id"), true)); assertThat(ex.getMessage(), containsString("All nodes must be the same version")); } + @SuppressWarnings("unchecked") private void testSnapshotUpgrade() throws Exception { - Job job = getJob(JOB_ID).jobs().get(0); - String currentSnapshot = job.getModelSnapshotId(); + Map<String, Object> jobs = entityAsMap(getJob(JOB_ID)); + String currentSnapshotId = ((List<String>) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0); - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - assertThat(modelSnapshots.snapshots().get(0).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); - assertThat(modelSnapshots.snapshots().get(1).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); + Response getSnapshotsResponse = getModelSnapshots(JOB_ID); + List<Map<String, Object>> snapshots = (List<Map<String, Object>>) entityAsMap(getSnapshotsResponse).get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); - ModelSnapshot snapshot = modelSnapshots.snapshots() - .stream() - .filter(s -> s.getSnapshotId().equals(currentSnapshot) == false) + Map<String, Object> snapshotToUpgrade = snapshots.stream() + .filter(s -> s.get("snapshot_id").equals(currentSnapshotId) == false) .findFirst() - .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshot)); + .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshotId)); // Don't wait for completion in the initial upgrade call, but instead poll for status // using the stats endpoint - this mimics what the Kibana upgrade assistant does - String snapshotToUpgrade = snapshot.getSnapshotId(); - assertThat( - hlrc.upgradeJobSnapshot(new UpgradeJobModelSnapshotRequest(JOB_ID, snapshotToUpgrade, null, false), RequestOptions.DEFAULT) - .isCompleted(), - is(false) - ); + String snapshotToUpgradeId = (String) snapshotToUpgrade.get("snapshot_id"); + Map<String, Object> upgradeResponse = entityAsMap(upgradeJobSnapshot(JOB_ID, snapshotToUpgradeId, false)); + assertFalse((boolean) upgradeResponse.get("completed")); // Wait for completion by waiting for the persistent task to disappear assertBusy(() -> { try { Response response = client().performRequest( - new Request("GET", "_ml/anomaly_detectors/" + JOB_ID + "/model_snapshots/" + snapshotToUpgrade + "/_upgrade/_stats") + new Request("GET", "_ml/anomaly_detectors/" + JOB_ID + "/model_snapshots/" + snapshotToUpgradeId + "/_upgrade/_stats") ); // Doing this instead of using expectThrows() on the line above means we get better diagnostics if the test fails fail("Upgrade still in progress: " + entityAsMap(response)); @@ -184,96 +143,122 @@ private void testSnapshotUpgrade() throws Exception { } }, 30, TimeUnit.SECONDS); - List<ModelSnapshot> snapshots = getModelSnapshots(job.getId(), snapshotToUpgrade).snapshots(); - assertThat(snapshots, hasSize(1)); - snapshot = snapshots.get(0); - assertThat(snapshot.getLatestRecordTimeStamp(), equalTo(snapshots.get(0).getLatestRecordTimeStamp())); + List<Map<String, Object>> upgradedSnapshot = (List<Map<String, Object>>) entityAsMap(getModelSnapshots(JOB_ID, snapshotToUpgradeId)) + .get("model_snapshots"); + assertThat(upgradedSnapshot, hasSize(1)); + assertThat(upgradedSnapshot.get(0).get("latest_record_time_stamp"), equalTo(snapshotToUpgrade.get("latest_record_time_stamp"))); // Does the snapshot still work? + var stats = entityAsMap(getJobStats(JOB_ID)); + List<Map<String, Object>> jobStats = (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", stats); assertThat( - hlrc.getJobStats(new GetJobStatsRequest(JOB_ID), RequestOptions.DEFAULT) - .jobStats() - .get(0) - .getDataCounts() - .getLatestRecordTimeStamp(), - greaterThan(snapshot.getLatestRecordTimeStamp()) + (long) XContentMapValues.extractValue("data_counts.latest_record_timestamp", jobStats.get(0)), + greaterThan((long) snapshotToUpgrade.get("latest_record_time_stamp")) ); - RevertModelSnapshotRequest revertModelSnapshotRequest = new RevertModelSnapshotRequest(JOB_ID, snapshotToUpgrade); - revertModelSnapshotRequest.setDeleteInterveningResults(true); - assertThat( - hlrc.revertModelSnapshot(revertModelSnapshotRequest, RequestOptions.DEFAULT).getModel().getSnapshotId(), - equalTo(snapshotToUpgrade) - ); - assertThat(openJob(JOB_ID).isOpened(), is(true)); + + var revertResponse = entityAsMap(revertModelSnapshot(JOB_ID, snapshotToUpgradeId, true)); + assertThat((String) XContentMapValues.extractValue("model.snapshot_id", revertResponse), equalTo(snapshotToUpgradeId)); + assertThat(entityAsMap(openJob(JOB_ID)).get("opened"), is(true)); + + stats = entityAsMap(getJobStats(JOB_ID)); + jobStats = (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", stats); assertThat( - hlrc.getJobStats(new GetJobStatsRequest(JOB_ID), RequestOptions.DEFAULT) - .jobStats() - .get(0) - .getDataCounts() - .getLatestRecordTimeStamp(), - equalTo(snapshot.getLatestRecordTimeStamp()) + (long) XContentMapValues.extractValue("data_counts.latest_record_timestamp", jobStats.get(0)), + equalTo((long) upgradedSnapshot.get(0).get("latest_record_time_stamp")) ); closeJob(JOB_ID); } + @SuppressWarnings("unchecked") private void createJobAndSnapshots() throws Exception { TimeValue bucketSpan = TimeValue.timeValueHours(1); long startTime = 1491004800000L; - PutJobResponse jobResponse = buildAndPutJob(JOB_ID, bucketSpan); - Job job = jobResponse.getResponse(); - openJob(job.getId()); - DataCounts dataCounts = postData( - job.getId(), - generateData(startTime, bucketSpan, 10, Arrays.asList("foo"), (bucketIndex, series) -> bucketIndex == 5 ? 100.0 : 10.0).stream() - .collect(Collectors.joining()) - ).getDataCounts(); - assertThat(dataCounts.getInvalidDateCount(), equalTo(0L)); - assertThat(dataCounts.getBucketCount(), greaterThan(0L)); - final long lastCount = dataCounts.getBucketCount(); - flushJob(job.getId()); - closeJob(job.getId()); + buildAndPutJob(JOB_ID, bucketSpan); + openJob(JOB_ID); + var dataCounts = entityAsMap( + postData( + JOB_ID, + String.join( + "", + generateData( + startTime, + bucketSpan, + 10, + Collections.singletonList("foo"), + (bucketIndex, series) -> bucketIndex == 5 ? 100.0 : 10.0 + ) + ) + ) + ); + + assertThat((Integer) dataCounts.get("invalid_date_count"), equalTo(0)); + assertThat((Integer) dataCounts.get("bucket_count"), greaterThan(0)); + final int lastCount = (Integer) dataCounts.get("bucket_count"); + flushJob(JOB_ID); + closeJob(JOB_ID); // We need to wait a second to ensure the second time around model snapshot will have a different ID (it depends on epoch seconds) waitUntil(() -> false, 2, TimeUnit.SECONDS); - openJob(job.getId()); - dataCounts = postData( - job.getId(), - generateData(startTime + 10 * bucketSpan.getMillis(), bucketSpan, 10, Arrays.asList("foo"), (bucketIndex, series) -> 10.0) - .stream() - .collect(Collectors.joining()) - ).getDataCounts(); - assertThat(dataCounts.getInvalidDateCount(), equalTo(0L)); - assertThat(dataCounts.getBucketCount(), greaterThan(lastCount)); - flushJob(job.getId()); - closeJob(job.getId()); - - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - assertThat(modelSnapshots.snapshots().get(0).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); - assertThat(modelSnapshots.snapshots().get(1).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); + openJob(JOB_ID); + dataCounts = entityAsMap( + postData( + JOB_ID, + String.join( + "", + generateData( + startTime + 10 * bucketSpan.getMillis(), + bucketSpan, + 10, + Collections.singletonList("foo"), + (bucketIndex, series) -> 10.0 + ) + ) + ) + ); + assertThat((Integer) dataCounts.get("invalid_date_count"), equalTo(0)); + assertThat((Integer) dataCounts.get("bucket_count"), greaterThan(lastCount)); + flushJob(JOB_ID); + closeJob(JOB_ID); + + var modelSnapshots = entityAsMap(getModelSnapshots(JOB_ID)); + var snapshots = (List<Map<String, Object>>) modelSnapshots.get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); }
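
The MlJobSnapshotUpgradeIT rewrite above swaps every high-level-client (HLRC) call for the same low-level pattern: build a `Request` against the ML REST endpoint, run it through the `RestClient`, and read the JSON body back as a map. A condensed sketch of that round trip, assuming a locally reachable cluster (the host, port, and `my-job` name are placeholders; the test itself uses `client()` and `entityAsMap(...)` from the ES test framework):

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class LowLevelMlClientSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // No request/response POJOs: the endpoint path is the whole contract.
            Request open = new Request("POST", "/_ml/anomaly_detectors/my-job/_open");
            Response response = client.performRequest(open);
            // The tests parse this body into a Map (entityAsMap) and pull values
            // out with XContentMapValues.extractValue("a.b.c", map).
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
```

One detail worth calling out in the assertions above: `Integer.parseInt(str, 0, 1, 10)` is the Java 9+ begin/end-index overload, so it parses only the first character of the `min_version` string, i.e. exactly the major-version digit being compared against `UPGRADE_FROM_VERSION.major`.
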
- private PutJobResponse buildAndPutJob(String jobId, TimeValue bucketSpan) throws Exception { - Detector.Builder detector = new Detector.Builder("mean", "value"); - detector.setPartitionFieldName("series"); - List<Detector> detectors = new ArrayList<>(); - detectors.add(detector.build()); + private Response buildAndPutJob(String jobId, TimeValue bucketSpan) throws Exception { boolean isCategorization = randomBoolean(); + String jobConfig; + if (isCategorization) { - detectors.add(new Detector.Builder("count", null).setByFieldName("mlcategory").build()); - } - AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(detectors); - analysisConfig.setBucketSpan(bucketSpan); - if (isCategorization) { - analysisConfig.setCategorizationFieldName("text"); + jobConfig = """ + { + "analysis_config" : { + "bucket_span":""" + "\"" + bucketSpan + "\"," + """ + "detectors":[{"function":"mean", "field_name":"value", "partition_field_name":"series"}, + {"function":"count", "by_field_name":"mlcategory"}], + "categorization_field_name":"text" + }, + "data_description" : { + } + }"""; + } else { + jobConfig = """ + { + "analysis_config" : { + "bucket_span":""" + "\"" + bucketSpan + "\"," + """ + "detectors":[{"function":"mean", "field_name":"value", "partition_field_name":"series"}] + }, + "data_description" : { + } + }"""; } - Job.Builder job = new Job.Builder(jobId); - job.setAnalysisConfig(analysisConfig); - DataDescription.Builder dataDescription = new DataDescription.Builder(); - job.setDataDescription(dataDescription); - return putJob(job.build()); + Request request = new Request("PUT", "/_ml/anomaly_detectors/" + jobId); + request.setJsonEntity(jobConfig); + return client().performRequest(request); } private static List<String> generateData( @@ -305,19 +290,19 @@ record = new HashMap<>(); return data; } - protected GetJobResponse getJob(String jobId) throws IOException { - return hlrc.getJob(new GetJobRequest(jobId), RequestOptions.DEFAULT); + protected Response getJob(String jobId) throws IOException { + return client().performRequest(new Request("GET", "/_ml/anomaly_detectors/" + jobId)); } - protected PutJobResponse putJob(Job job) throws IOException { - return hlrc.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + protected Response getJobStats(String jobId) throws IOException { + return client().performRequest(new Request("GET", "/_ml/anomaly_detectors/" + jobId + "/_stats")); } - protected OpenJobResponse openJob(String jobId) throws IOException { - return hlrc.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + protected Response openJob(String jobId) throws IOException { + return client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_open")); } - protected PostDataResponse postData(String jobId, String data) throws IOException { + protected Response postData(String jobId, String data) throws IOException { // Post data is deprecated, so a deprecation warning is possible (depending on the old version) RequestOptions postDataOptions = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> { if (warnings.isEmpty()) { @@ -332,25 +317,52 @@ protected PostDataResponse postData(String jobId, String data) throws IOException + "in a future major version it will be compulsory to use a datafeed" ) == false; }).build(); - return hlrc.postData(new PostDataRequest(jobId, XContentType.JSON, new BytesArray(data)), postDataOptions); + + Request postDataRequest = new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_data"); + // Post data is deprecated, so expect a deprecation warning + postDataRequest.setOptions(postDataOptions); + postDataRequest.setJsonEntity(data); + return client().performRequest(postDataRequest); } - protected FlushJobResponse flushJob(String jobId) throws IOException { - return hlrc.flushJob(new FlushJobRequest(jobId), RequestOptions.DEFAULT); + protected void flushJob(String jobId) throws IOException { + client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_flush")); } - protected CloseJobResponse closeJob(String jobId) throws IOException { - return hlrc.closeJob(new CloseJobRequest(jobId), RequestOptions.DEFAULT); + private void closeJob(String jobId) throws IOException { Response closeResponse = client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_close")); + assertThat(entityAsMap(closeResponse), hasEntry("closed", true)); } - protected GetModelSnapshotsResponse getModelSnapshots(String jobId) throws IOException { + protected Response getModelSnapshots(String jobId) throws IOException { return getModelSnapshots(jobId, null); } - protected GetModelSnapshotsResponse getModelSnapshots(String jobId, String snapshotId) throws IOException { - GetModelSnapshotsRequest getModelSnapshotsRequest = new GetModelSnapshotsRequest(jobId); - getModelSnapshotsRequest.setSnapshotId(snapshotId); - return hlrc.getModelSnapshots(getModelSnapshotsRequest, RequestOptions.DEFAULT); + protected Response getModelSnapshots(String jobId, String snapshotId) throws IOException { + String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/"; + if (snapshotId != null) { + url = url + snapshotId; + } + return client().performRequest(new Request("GET", url)); + } + + private Response revertModelSnapshot(String jobId, String snapshotId, boolean deleteIntervening) throws IOException { + String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert"; + + if (deleteIntervening) { + url = url + "?delete_intervening_results=true"; + } + Request request = new Request("POST", url); + return client().performRequest(request); + } + + private Response upgradeJobSnapshot(String jobId, String snapshotId, boolean waitForCompletion) throws IOException { + String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_upgrade"; + if (waitForCompletion) { + url = url + "?wait_for_completion=true"; + } + Request request = new Request("POST", url); + return client().performRequest(request); } protected static String createJsonRecord(Map<String, Object> keyValueMap) throws IOException { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java index c9bbf2d78f2c8..2438de83e0eff 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java @@ -10,19 +10,12 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.client.ml.job.config.DataDescription; -import org.elasticsearch.client.ml.job.config.Detector; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import java.io.IOException; import java.util.Collection; -import java.util.Collections; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -72,19 +65,21 @@ public void testMappingsUpgrade() throws Exception { } private void createAndOpenTestJob() throws IOException { - - Detector.Builder d = new Detector.Builder("metric", "responsetime"); - d.setByFieldName("airline"); - AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build())); - analysisConfig.setBucketSpan(TimeValue.timeValueMinutes(10)); - Job.Builder job = new Job.Builder(JOB_ID); - job.setAnalysisConfig(analysisConfig); - job.setDataDescription(new DataDescription.Builder()); // Use a custom index because other rolling upgrade tests meddle with the shared index - job.setResultsIndexName("mappings-upgrade-test"); + String jobConfig = """ + { + "results_index_name":"mappings-upgrade-test", + "analysis_config" : { + "bucket_span": "600s", + "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] + }, + "data_description" : { + } + } + """; Request putJob = new Request("PUT", "_ml/anomaly_detectors/" + JOB_ID); - putJob.setJsonEntity(Strings.toString(job.build())); + putJob.setJsonEntity(jobConfig); Response
response = client().performRequest(putJob); assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 265f3547b6d65..ff94bb5745326 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -15,13 +15,13 @@ - do: index: index: upgraded_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: upgraded_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -46,5 +46,5 @@ - do: index: index: scroll_index - id: 1 + id: "1" body: { value: $scroll_id } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml index 0c5deab19068d..d4aec6ac1f0ab 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml @@ -3,7 +3,7 @@ - do: get: index: scroll_index - id: 1 + id: "1" - set: {_source.value: scroll_id} diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index c78903de66081..e2c73dde88977 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -40,9 +42,9 @@ subprojects { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("yamlRestTest").configure { @@ -70,7 +72,7 @@ subprojects { /////// TO FIX /////// 'search.aggregation/40_range/Date range', //source only date field should also emit values for numbers, it expects strings only 'search/115_multiple_field_collapsing/two levels fields collapsing', // Field collapsing on a runtime field does not work - 'field_caps/30_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do? + 'field_caps/30_index_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do? 
'search.aggregation/220_filters_bucket/cache busting', // runtime keyword does not support split_queries_on_whitespace 'search/140_pre_filter_search_shards/pre_filter_shard_size with shards that have no hit', //completion suggester does not return options when the context field is a geo_point runtime field diff --git a/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java b/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java index 8b997decdf5cf..963e7e5341ee5 100644 --- a/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java +++ b/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java @@ -6,14 +6,12 @@ */ package org.elasticsearch.example.role; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -24,7 +22,7 @@ import java.io.IOException; import java.util.Collections; -import java.util.List; +import java.util.Map; import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.INDEX; import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_A; @@ -58,11 +56,19 @@ protected Settings restClientSettings() { } public void setupTestUser(String role) throws IOException { - new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword(new User(TEST_USER, List.of(role)), TEST_PWD.toCharArray(), true, RefreshPolicy.IMMEDIATE), - RequestOptions.DEFAULT - ); + final String endpoint = "/_security/user/" + TEST_USER; + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + final String body = """ + { + "username": "%s", + "password": "%s", + "roles": [ "%s" ] + } + """.formatted(TEST_USER, TEST_PWD, role); + request.setJsonEntity(body); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); } public void testAuthorizedCustomRoleSucceeds() throws Exception { diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index df6189b5ca681..e0a72946be332 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -10,10 +10,6 @@ import io.netty.util.concurrent.GlobalEventExecutor; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import 
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
@@ -23,10 +19,6 @@
 import org.elasticsearch.client.indices.GetIndexRequest;
 import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
 import org.elasticsearch.client.indices.GetIndexTemplatesResponse;
-import org.elasticsearch.client.xpack.XPackUsageRequest;
-import org.elasticsearch.client.xpack.XPackUsageResponse;
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
-import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
@@ -165,9 +157,7 @@ public void enableExporter() throws Exception {
             .put("xpack.monitoring.exporters._http.ssl.certificate_authorities", "testnode.crt")
             .setSecureSettings(secureSettings)
             .build();
-        ClusterUpdateSettingsResponse response = newHighLevelClient().cluster()
-            .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT);
-        assertTrue(response.isAcknowledged());
+        updateClusterSettings(exporterSettings);
     }
 
     @After
@@ -181,15 +171,12 @@ public void disableExporter() throws IOException {
             .putNull("xpack.monitoring.exporters._http.ssl.verification_mode")
             .putNull("xpack.monitoring.exporters._http.ssl.certificate_authorities")
             .build();
-        ClusterUpdateSettingsResponse response = newHighLevelClient().cluster()
-            .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT);
-        assertTrue(response.isAcknowledged());
+        updateClusterSettings(exporterSettings);
     }
 
+    @SuppressWarnings("unchecked")
     private boolean getMonitoringUsageExportersDefined() throws Exception {
-        RestHighLevelClient client = newHighLevelClient();
-        final XPackUsageResponse usageResponse = client.xpack().usage(new XPackUsageRequest(), RequestOptions.DEFAULT);
-        Map<String, Object> monitoringUsage = usageResponse.getUsages().get("monitoring");
+        Map<String, Object> monitoringUsage = (Map<String, Object>) getAsMap("/_xpack/usage").get("monitoring");
         assertThat("Monitoring feature set does not exist", monitoringUsage, notNullValue());
 
         @SuppressWarnings("unchecked")
@@ -225,13 +212,12 @@ public void testHTTPExporterWithSSL() throws Exception {
         });
 
         // Waits for indices to be ready
-        ClusterHealthRequest healthRequest = new ClusterHealthRequest(MONITORING_PATTERN);
-        healthRequest.waitForStatus(ClusterHealthStatus.YELLOW);
-        healthRequest.waitForEvents(Priority.LANGUID);
-        healthRequest.waitForNoRelocatingShards(true);
-        healthRequest.waitForNoInitializingShards(true);
-        ClusterHealthResponse response = client.cluster().health(healthRequest, RequestOptions.DEFAULT);
-        assertThat(response.isTimedOut(), is(false));
+        ensureHealth(MONITORING_PATTERN, (request) -> {
+            request.addParameter("wait_for_status", "yellow");
+            request.addParameter("wait_for_events", "languid");
+            request.addParameter("wait_for_no_relocating_shards", "true");
+            request.addParameter("wait_for_no_initializing_shards", "true");
+        });
 
         // Checks that the HTTP exporter has successfully exported some data
         SearchRequest searchRequest = new SearchRequest(new String[] { MONITORING_PATTERN }, new SearchSourceBuilder().size(0));
diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml
index 4dcc8c847c464..39e7239cba4d3 100644
--- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml
+++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml
@@ -110,7 +110,7 @@ setup:
   - do:
       index:
         index: foobar
-        id: 1
+        id: "1"
         body: >
             {
               "username": "inline_template_user"
@@ -118,7 +118,7 @@ setup:
   - do:
       index:
         index: foobar
-        id: 2
+        id: "2"
         body: >
             {
               "username": "stored_template_user"
diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml
index b3948028f4144..11f93ee9909cd 100644
--- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml
+++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml
@@ -110,7 +110,7 @@ setup:
   - do:
       index:
         index: foobar
-        id: 1
+        id: "1"
         body: >
             {
               "username": "inline_template_user"
@@ -118,7 +118,7 @@ setup:
   - do:
       index:
         index: foobar
-        id: 2
+        id: "2"
         body: >
             {
               "username": "stored_template_user"
diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml
index 1ce18208a1085..9b4221e50f8d3 100644
--- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml
+++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml
@@ -32,14 +32,14 @@ setup:
   - do:
       index:
         index: foobar
-        id: 1
+        id: "1"
         body:
           title: "contains some words"
 
   - do:
       index:
         index: unauthorized_index
-        id: 2
+        id: "2"
         body:
           title: "contains some words too"
 
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
index 43eeb69ed3fdf..c6644a976d08a 100644
--- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
+++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
@@ -107,6 +107,7 @@ private RealmConfig configureRealm(String name, String type, Settings settings)
             .normalizePrefix("xpack.security.authc.realms." + type + "." + name + ".")
             .put(globalSettings)
             .put(getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0)
+            .put(getFullSettingKey(identifier, SessionFactorySettings.TIMEOUT_RESPONSE_SETTING), "15s")
             .build();
         final Environment env = TestEnvironment.newEnvironment(mergedSettings);
         this.sslService = new SSLService(env);
diff --git a/x-pack/qa/xpack-prefix-rest-compat/build.gradle b/x-pack/qa/xpack-prefix-rest-compat/build.gradle
index ebeab608dd23a..8b6a81202f951 100644
--- a/x-pack/qa/xpack-prefix-rest-compat/build.gradle
+++ b/x-pack/qa/xpack-prefix-rest-compat/build.gradle
@@ -6,6 +6,8 @@
  * Side Public License, v 1.
  */
 
+
+import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask
@@ -94,9 +96,7 @@ testClusters.configureEach {
   extraConfigFile nodeCert.name, nodeCert
   extraConfigFile serviceTokens.name, serviceTokens
 
-  if (BuildParams.isSnapshotBuild() == false) {
-    systemProperty 'es.index_mode_feature_flag_registered', 'true'
-  }
+  requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0")
 }
 
 // transform (again) the (already) transformed x-pack compatibility tests to test the xpack prefixes
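
A note on the recurring change in the test sources above: the QA tests drop the high-level REST client's typed request/response classes (PutUserRequest, ClusterUpdateSettingsRequest, ClusterHealthRequest, XPackUsageRequest) in favor of the low-level client, where every call is an explicit Request carrying a method, an endpoint, query parameters, and a JSON body. A minimal standalone sketch of the put-user variant, mirroring the CustomRolesProviderIT hunk; the class name and the RestClient parameter here are illustrative, not part of the patch:

    import java.util.Map;

    import org.apache.http.client.methods.HttpPut;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public final class PutUserSketch {
        // PUT /_security/user/<name>?refresh=true with an inline JSON body,
        // replacing the old security().putUser(PutUserRequest, RequestOptions) call.
        static Response putUser(RestClient client, String user, String password, String role) throws Exception {
            Request request = new Request(HttpPut.METHOD_NAME, "/_security/user/" + user);
            request.setJsonEntity("""
                {
                  "username": "%s",
                  "password": "%s",
                  "roles": [ "%s" ]
                }
                """.formatted(user, password, role));
            // ?refresh=true stands in for the old RefreshPolicy.IMMEDIATE
            request.addParameters(Map.of("refresh", "true"));
            return client.performRequest(request);
        }
    }

The cluster-health wait in SmokeTestMonitoringWithSecurityIT follows the same mapping: each ClusterHealthRequest setter becomes a _cluster/health query parameter (wait_for_status, wait_for_events, wait_for_no_relocating_shards, wait_for_no_initializing_shards), which is what the ensureHealth callback forwards.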
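Two smaller recurring changes are also visible in these hunks. In the Gradle files, requiresFeature 'es.<flag>', Version.fromString("...") replaces the hand-rolled "if (BuildParams.isSnapshotBuild() == false) { systemProperty ... }" blocks, so each build script no longer decides for itself when a feature-flag system property must be set (presumably the helper keys this on the version in which the flag was introduced). In the YAML fixtures, document ids are now quoted (id: "42" rather than id: 42): an unquoted scalar parses as a YAML integer, while _id is always a string, so quoting keeps the fixtures type-correct.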