diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index de840b910ada2..8503dd364c8d8 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -39,4 +39,5 @@ BWC_VERSION:
- "1.3.0"
- "1.3.1"
- "1.3.2"
- - "1.4.0"
+ - "2.0.0"
+ - "2.1.0"
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index b42e7c4f2f317..030689642677a 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -59,6 +59,7 @@ jobs:
base: ${{ env.BASE }}
branch: 'create-pull-request/patch-${{ env.BASE }}'
commit-message: Incremented version to ${{ env.NEXT_VERSION }}
+ signoff: true # adds a Signed-off-by trailer to the automated commit (DCO)
delete-branch: true
title: '[AUTO] Incremented version to ${{ env.NEXT_VERSION }}.'
body: |
@@ -83,6 +84,7 @@ jobs:
base: ${{ env.BASE_X }}
branch: 'create-pull-request/patch-${{ env.BASE_X }}'
commit-message: Added bwc version ${{ env.NEXT_VERSION }}
+ signoff: true
delete-branch: true
title: '[AUTO] [${{ env.BASE_X }}] Added bwc version ${{ env.NEXT_VERSION }}.'
body: |
@@ -107,6 +109,7 @@ jobs:
base: main
branch: 'create-pull-request/patch-main'
commit-message: Added bwc version ${{ env.NEXT_VERSION }}
+ signoff: true
delete-branch: true
title: '[AUTO] [main] Added bwc version ${{ env.NEXT_VERSION }}.'
body: |
diff --git a/TESTING.md b/TESTING.md
index 4a2a786469b67..d6f246dbd6dcc 100644
--- a/TESTING.md
+++ b/TESTING.md
@@ -245,7 +245,7 @@ The YAML REST tests support all the options provided by the randomized runner, p
- `tests.rest.suite`: comma separated paths of the test suites to be run (by default loaded from /rest-api-spec/test). It is possible to run only a subset of the tests providing a sub-folder or even a single yaml file (the default /rest-api-spec/test prefix is optional when files are loaded from classpath) e.g. `-Dtests.rest.suite=index,get,create/10_with_id`
-- `tests.rest.blacklist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**`
+- `tests.rest.denylist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.denylist=index/**/Index document,get/10_basic/**`
Java REST tests can be run with the "javaRestTest" task.
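(Usage note: with the renamed property, a full YAML REST test invocation that skips the example suites might look like `./gradlew yamlRestTest "-Dtests.rest.denylist=index/**/Index document,get/10_basic/**"` — the quotes matter because a named test's glob can contain spaces. The `yamlRestTest` task name follows the convention noted above and is an assumption here.)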
diff --git a/build.gradle b/build.gradle
index be5766f327e0d..a41ad61de39a6 100644
--- a/build.gradle
+++ b/build.gradle
@@ -48,7 +48,8 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.3.0" apply false
+ id "com.diffplug.spotless" version "6.4.2" apply false
+ id "org.gradle.test-retry" version "1.3.1" apply false
}
apply from: 'gradle/build-complete.gradle'
@@ -232,7 +233,7 @@ allprojects {
tasks.withType(JavaCompile).configureEach { JavaCompile compile ->
// See please https://bugs.openjdk.java.net/browse/JDK-8209058
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_11) {
- compile.options.compilerArgs << '-Werror'
+ compile.options.compilerArgs << '-Werror'
}
compile.options.compilerArgs << '-Xlint:auxiliaryclass'
compile.options.compilerArgs << '-Xlint:cast'
@@ -386,6 +387,18 @@ gradle.projectsEvaluated {
}
}
+// test retry configuration
+subprojects {
+ apply plugin: "org.gradle.test-retry"
+ tasks.withType(Test).configureEach {
+ retry {
+ failOnPassedAfterRetry = false // a test that passes on retry still counts as a pass
+ maxRetries = 3 // retry each failing test up to three times
+ maxFailures = 10 // stop retrying once ten tests have failed in a task, to fail fast on real breakage
+ }
+ }
+}
+
// eclipse configuration
allprojects {
apply plugin: 'eclipse'
@@ -445,9 +458,9 @@ allprojects {
tasks.named('eclipse') { dependsOn 'cleanEclipse', 'copyEclipseSettings' }
afterEvaluate {
- tasks.findByName("eclipseJdt")?.configure {
- dependsOn 'copyEclipseSettings'
- }
+ tasks.findByName("eclipseJdt")?.configure {
+ dependsOn 'copyEclipseSettings'
+ }
}
}
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index f940eec593306..d478a1fd45e80 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -107,16 +107,16 @@ dependencies {
api 'org.apache.ant:ant:1.10.12'
api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0'
api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
- api 'com.netflix.nebula:gradle-info-plugin:7.1.3'
+ api 'com.netflix.nebula:gradle-info-plugin:11.3.3'
api 'org.apache.rat:apache-rat:0.13'
api 'commons-io:commons-io:2.7'
api "net.java.dev.jna:jna:5.10.0"
api 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2'
- api 'de.thetaphi:forbiddenapis:3.2'
- api 'com.avast.gradle:gradle-docker-compose-plugin:0.14.12'
+ api 'de.thetaphi:forbiddenapis:3.3'
+ api 'com.avast.gradle:gradle-docker-compose-plugin:0.15.2'
api 'org.apache.maven:maven-model:3.6.2'
- api 'com.networknt:json-schema-validator:1.0.67'
- api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson')}"
+ api 'com.networknt:json-schema-validator:1.0.68'
+ api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
testFixturesApi "junit:junit:${props.getProperty('junit')}"
testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
diff --git a/buildSrc/reaper/build.gradle b/buildSrc/reaper/build.gradle
index d5e8d6ebc7099..4ccbec894e30e 100644
--- a/buildSrc/reaper/build.gradle
+++ b/buildSrc/reaper/build.gradle
@@ -11,6 +11,9 @@
apply plugin: 'java'
+targetCompatibility = JavaVersion.VERSION_11
+sourceCompatibility = JavaVersion.VERSION_11
+
jar {
archiveFileName = "${project.name}.jar"
manifest {
diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/opensearch/gradle/precommit/LicenseHeadersTask.groovy
index b330934ed2d26..b8d0ed2b9c43c 100644
--- a/buildSrc/src/main/groovy/org/opensearch/gradle/precommit/LicenseHeadersTask.groovy
+++ b/buildSrc/src/main/groovy/org/opensearch/gradle/precommit/LicenseHeadersTask.groovy
@@ -35,7 +35,10 @@ import org.opensearch.gradle.AntTask
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
+import org.gradle.api.tasks.IgnoreEmptyDirectories
import org.gradle.api.tasks.OutputFile
+import org.gradle.api.tasks.PathSensitive
+import org.gradle.api.tasks.PathSensitivity
import org.gradle.api.tasks.SkipWhenEmpty
import java.nio.file.Files
@@ -78,6 +81,8 @@ class LicenseHeadersTask extends AntTask {
*/
@InputFiles
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
+ @PathSensitive(PathSensitivity.RELATIVE)
List getJavaFiles() {
return project.sourceSets.collect({it.allJava})
}
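The `@SkipWhenEmpty`/`@IgnoreEmptyDirectories`/`@PathSensitive` trio added here reappears on several tasks below. A minimal sketch of the pattern on a hypothetical custom task (names are illustrative, not from this change); the motivation, consistent with the Gradle 7.4.1 minimum set later in this diff, is that Gradle 7.4 expects `@SkipWhenEmpty` file inputs to also declare directory and path sensitivity:

```java
import org.gradle.api.DefaultTask;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.TaskAction;

public abstract class ExampleSourcesTask extends DefaultTask {

    // Skip the task entirely when there are no input files; ignore empty
    // directories when making that decision; and key the build cache on
    // relative paths so cached results survive a relocated checkout.
    @InputFiles
    @SkipWhenEmpty
    @IgnoreEmptyDirectories
    @PathSensitive(PathSensitivity.RELATIVE)
    public abstract ConfigurableFileCollection getSources();

    @TaskAction
    void check() {
        getSources().forEach(file -> getLogger().lifecycle(file.getName()));
    }
}
```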
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Version.java b/buildSrc/src/main/java/org/opensearch/gradle/Version.java
index 3012488381729..4c184f908e5d2 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/Version.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/Version.java
@@ -77,7 +77,7 @@ public Version(int major, int minor, int revision) {
// currently snapshot is not taken into account
int id = major * 10000000 + minor * 100000 + revision * 1000;
// identify if new OpenSearch version 1
- this.id = major == 1 || major == 2 ? id ^ MASK : id;
+ this.id = major == 1 || major == 2 || major == 3 ? id ^ MASK : id;
}
private static int parseSuffixNumber(String substring) {
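A self-contained sketch of the masked-id arithmetic above (the `MASK` value shown is an assumption; the real constant is defined in this class):

```java
public class VersionIdSketch {
    // Assumed mask value; the real constant lives in org.opensearch.gradle.Version.
    private static final int MASK = 0x08000000;

    static int id(int major, int minor, int revision) {
        int id = major * 10000000 + minor * 100000 + revision * 1000;
        // OpenSearch 1.x, 2.x and (now) 3.x ids are XOR-ed with MASK so they
        // never collide with legacy version ids in the same numeric space.
        return (major == 1 || major == 2 || major == 3) ? id ^ MASK : id;
    }

    public static void main(String[] args) {
        System.out.println(id(3, 0, 0));  // masked: OpenSearch 3.0.0
        System.out.println(id(7, 10, 2)); // unmasked: legacy 7.10.2
    }
}
```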
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/FilePermissionsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/FilePermissionsTask.java
index 9ffd472151b4b..d525a4a1e2c69 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/FilePermissionsTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/FilePermissionsTask.java
@@ -46,8 +46,11 @@
import org.gradle.api.GradleException;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.StopExecutionException;
import org.gradle.api.tasks.TaskAction;
@@ -92,6 +95,8 @@ private static boolean isExecutableFile(File file) {
*/
@InputFiles
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
+ @PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getFiles() {
return GradleUtils.getJavaSourceSets(getProject())
.stream()
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenPatternsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenPatternsTask.java
index f57c190496452..754743b9b784c 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenPatternsTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenPatternsTask.java
@@ -37,9 +37,12 @@
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.plugins.JavaPluginConvention;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.util.PatternFilterable;
@@ -100,6 +103,8 @@ public ForbiddenPatternsTask() {
@InputFiles
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
+ @PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getFiles() {
return getProject().getConvention()
.getPlugin(JavaPluginConvention.class)
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/LoggerUsageTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/LoggerUsageTask.java
index 1fd092b7f268f..ff9f6619d64e6 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/LoggerUsageTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/LoggerUsageTask.java
@@ -37,6 +37,7 @@
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
@@ -79,6 +80,7 @@ public void setClasspath(FileCollection classpath) {
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
public FileCollection getClassDirectories() {
return getProject().getConvention()
.getPlugin(JavaPluginConvention.class)
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
index ee68d2740e279..2a49ae05db1fb 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
@@ -47,6 +47,7 @@
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.CompileClasspath;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
@@ -195,6 +196,7 @@ public Set<String> getMissingClassExcludes() {
@Classpath
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
public Set<File> getJarsToScan() {
// These are SelfResolvingDependency, and some of them backed by file collections, like the Gradle API files,
// or dependencies added as `files(...)`, we can't be sure if those are third party or not.
@@ -235,6 +237,7 @@ public void runThirdPartyAudit() throws IOException {
Set<String> jdkJarHellClasses = runJdkJarHellCheck();
if (missingClassExcludes != null) {
+ assertNoPointlessExclusions("are not missing", missingClassExcludes, missingClasses);
long bogousExcludesCount = Stream.concat(missingClassExcludes.stream(), violationsExcludes.stream())
.filter(each -> missingClasses.contains(each) == false)
.filter(each -> violationsClasses.contains(each) == false)
@@ -245,7 +248,6 @@ public void runThirdPartyAudit() throws IOException {
"All excluded classes seem to have no issues. " + "This is sometimes an indication that the check silently failed"
);
}
- assertNoPointlessExclusions("are not missing", missingClassExcludes, missingClasses);
missingClasses.removeAll(missingClassExcludes);
}
assertNoPointlessExclusions("have no violations", violationsExcludes, violationsClasses);
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java
index 399cd39d236d7..1468c4cb1b537 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java
@@ -43,9 +43,12 @@
import org.gradle.api.file.FileTree;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.provider.ListProperty;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputDirectory;
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.TaskAction;
@@ -112,8 +115,10 @@ public boolean isSkipHasRestTestCheck() {
return skipHasRestTestCheck;
}
+ @IgnoreEmptyDirectories
@SkipWhenEmpty
@InputFiles
+ @PathSensitive(PathSensitivity.RELATIVE)
public FileTree getInputDir() {
FileTree coreFileTree = null;
boolean projectHasYamlRestTests = skipHasRestTestCheck || projectHasYamlRestTests();
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java
index 56ce449f4cf6f..dd94d040cb9d8 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java
@@ -43,9 +43,12 @@
import org.gradle.api.file.FileTree;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.provider.ListProperty;
+import org.gradle.api.tasks.IgnoreEmptyDirectories;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputDirectory;
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.TaskAction;
@@ -104,8 +107,10 @@ String getSourceSetName() {
return sourceSetName;
}
+ @IgnoreEmptyDirectories
@SkipWhenEmpty
@InputFiles
+ @PathSensitive(PathSensitivity.RELATIVE)
public FileTree getInputDir() {
FileTree coreFileTree = null;
if (includeCore.get().isEmpty() == false) {
diff --git a/buildSrc/src/main/resources/minimumGradleVersion b/buildSrc/src/main/resources/minimumGradleVersion
index ba92e72f5775b..6b0e58e78f5ee 100644
--- a/buildSrc/src/main/resources/minimumGradleVersion
+++ b/buildSrc/src/main/resources/minimumGradleVersion
@@ -1 +1 @@
-6.6.1
\ No newline at end of file
+7.4.1
\ No newline at end of file
diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java
index 446c94acc7ad4..d7798ef5040bb 100644
--- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java
+++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java
@@ -53,11 +53,11 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
private static Project packagesProject;
private static Project bwcProject;
- private static final Version BWC_MAJOR_VERSION = Version.fromString("5.0.0");
- private static final Version BWC_MINOR_VERSION = Version.fromString("4.1.0");
- private static final Version BWC_STAGED_VERSION = Version.fromString("4.0.0");
- private static final Version BWC_BUGFIX_VERSION = Version.fromString("4.0.1");
- private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("3.90.1");
+ private static final Version BWC_MAJOR_VERSION = Version.fromString("6.0.0");
+ private static final Version BWC_MINOR_VERSION = Version.fromString("5.1.0");
+ private static final Version BWC_STAGED_VERSION = Version.fromString("5.0.0");
+ private static final Version BWC_BUGFIX_VERSION = Version.fromString("5.0.1");
+ private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("4.90.1");
private static final BwcVersions BWC_MINOR = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)),
BWC_MAJOR_VERSION
diff --git a/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java b/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java
index a9f32886f7927..b6abe5662e474 100644
--- a/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java
+++ b/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java
@@ -65,7 +65,7 @@ public void testRelaxedVersionParsing() {
public void testCompareWithStringVersions() {
// 1.10.2 is now rebased to OpenSearch version; so this needs to report
- assertTrue("OpenSearch 1.10.20 is not interpreted as after Legacy 3.0.0", Version.fromString("1.10.20").after("3.0.0"));
+ assertTrue("OpenSearch 1.10.20 is not interpreted as after Legacy 6.0.0", Version.fromString("1.10.20").after("6.0.0"));
assertTrue(
"7.0.0-alpha1 should be equal to 7.0.0-alpha1",
Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1"))
diff --git a/buildSrc/src/testKit/thirdPartyAudit/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/build.gradle
index 41e699db94dcf..2c86d28cf0206 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/build.gradle
@@ -40,7 +40,7 @@ repositories {
}
dependencies {
- forbiddenApisCliJar 'de.thetaphi:forbiddenapis:3.2'
+ forbiddenApisCliJar 'de.thetaphi:forbiddenapis:3.3'
jdkJarHell 'org.opensearch:opensearch-core:current'
compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}"
implementation "org.${project.properties.compileGroup}:${project.properties.compileVersion}"
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index b5e14cd24bd93..7ae3bfaa19b5a 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
-opensearch = 2.0.0
-lucene = 9.1.0-snapshot-ea989fe8f30
+opensearch = 3.0.0
+lucene = 9.1.0
bundled_jdk_vendor = adoptium
bundled_jdk = 17.0.2+8
@@ -10,6 +10,7 @@ bundled_jdk = 17.0.2+8
spatial4j = 0.7
jts = 1.15.0
jackson = 2.13.2
+jackson_databind = 2.13.2.2
snakeyaml = 1.26
icu4j = 70.1
supercsv = 2.4.0
@@ -22,6 +23,13 @@ jna = 5.5.0
netty = 4.1.73.Final
joda = 2.10.12
+# client dependencies
+httpclient = 4.5.13
+httpcore = 4.4.12
+httpasyncclient = 4.1.4
+commonslogging = 1.2
+commonscodec = 1.13
+
# when updating this version, you need to ensure compatibility with:
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)
# - distribution/tools/plugin-cli
@@ -29,11 +37,6 @@ bouncycastle=1.70
# test dependencies
randomizedrunner = 2.7.1
junit = 4.13.2
-httpclient = 4.5.13
-httpcore = 4.4.12
-httpasyncclient = 4.1.4
-commonslogging = 1.1.3
-commonscodec = 1.13
hamcrest = 2.1
mockito = 4.3.1
objenesis = 3.2
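(A note on the reshuffle above: splitting `jackson_databind` out of the shared `jackson` property lets `jackson-databind` pick up its out-of-band micro-patches — 2.13.2.2 is such a databind-only release — while the other Jackson modules stay on 2.13.2. The HTTP client versions move from the test-dependency block into a dedicated client section, likely because the REST clients ship them at runtime, and the commons-logging bump to 1.2 matches the license sha1 file changes later in this diff.)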
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
index de8374b283ea6..375f004dc3052 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/tasks/TaskInfo.java
@@ -57,6 +57,7 @@ public class TaskInfo {
private TaskId parentTaskId;
private final Map<String, Object> status = new HashMap<>();
private final Map<String, String> headers = new HashMap<>();
+ private final Map<String, Object> resourceStats = new HashMap<>();
public TaskInfo(TaskId taskId) {
this.taskId = taskId;
@@ -150,6 +151,14 @@ public Map<String, Object> getStatus() {
return status;
}
+ void setResourceStats(Map<String, Object> resourceStats) {
+ this.resourceStats.putAll(resourceStats);
+ }
+
+ public Map<String, Object> getResourceStats() {
+ return resourceStats;
+ }
+
private void noOpParse(Object s) {}
public static final ObjectParser.NamedObjectParser<TaskInfo, Void> PARSER;
@@ -170,6 +179,7 @@ private void noOpParse(Object s) {}
parser.declareBoolean(TaskInfo::setCancelled, new ParseField("cancelled"));
parser.declareString(TaskInfo::setParentTaskId, new ParseField("parent_task_id"));
parser.declareObject(TaskInfo::setHeaders, (p, c) -> p.mapStrings(), new ParseField("headers"));
+ parser.declareObject(TaskInfo::setResourceStats, (p, c) -> p.map(), new ParseField("resource_stats"));
PARSER = (XContentParser p, Void v, String name) -> parser.parse(p, new TaskInfo(new TaskId(name)), null);
}
@@ -188,7 +198,8 @@ && isCancelled() == taskInfo.isCancelled()
&& Objects.equals(getDescription(), taskInfo.getDescription())
&& Objects.equals(getParentTaskId(), taskInfo.getParentTaskId())
&& Objects.equals(status, taskInfo.status)
- && Objects.equals(getHeaders(), taskInfo.getHeaders());
+ && Objects.equals(getHeaders(), taskInfo.getHeaders())
+ && Objects.equals(getResourceStats(), taskInfo.getResourceStats());
}
@Override
@@ -204,7 +215,8 @@ public int hashCode() {
isCancelled(),
getParentTaskId(),
status,
- getHeaders()
+ getHeaders(),
+ getResourceStats()
);
}
@@ -236,6 +248,8 @@ public String toString() {
+ status
+ ", headers="
+ headers
+ + ", resource_stats="
+ + resourceStats
+ '}';
}
}
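Since the parser registers `resource_stats` as a raw map (`p.map()`), callers of the new accessor see plain nested maps. A small sketch of the likely shape (key names are an assumption, based on `TaskResourceUsage` used in the tests below):

```java
import java.util.HashMap;
import java.util.Map;

public class ResourceStatsShapeSketch {
    public static void main(String[] args) {
        // What TaskInfo.getResourceStats() might return after parsing a task
        // that reported CPU and memory usage (field names assumed).
        Map<String, Object> usage = new HashMap<>();
        usage.put("cpu_time_in_nanos", 1_250_000L);
        usage.put("memory_in_bytes", 8_192L);

        Map<String, Object> resourceStats = new HashMap<>();
        resourceStats.put("total", usage);

        System.out.println(resourceStats); // {total={cpu_time_in_nanos=1250000, memory_in_bytes=8192}}
    }
}
```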
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
index 73ce7d1b2b794..7766fa76d5cfe 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
@@ -841,7 +841,6 @@ public void testApiNamingConventions() throws Exception {
"create",
"get_script_context",
"get_script_languages",
- "indices.exists_type",
"indices.get_upgrade",
"indices.put_alias",
"render_search_template",
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
index cabb125a739b7..cd759aa62eaf1 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
@@ -33,7 +33,6 @@
package org.opensearch.client.core;
import org.opensearch.Build;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.AbstractResponseTestCase;
import org.opensearch.cluster.ClusterName;
@@ -53,7 +52,7 @@ protected org.opensearch.action.main.MainResponse createServerTestInstance(XCont
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
String nodeName = randomAlphaOfLength(10);
final String date = new Date(randomNonNegativeLong()).toString();
- Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Build build = new Build(
Build.Type.UNKNOWN,
randomAlphaOfLength(8),
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/core/tasks/GetTaskResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/tasks/GetTaskResponseTests.java
index 403e295303784..07ee0bedd4777 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/core/tasks/GetTaskResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/tasks/GetTaskResponseTests.java
@@ -38,6 +38,8 @@
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.tasks.RawTaskStatus;
+import org.opensearch.tasks.TaskResourceStats;
+import org.opensearch.tasks.TaskResourceUsage;
import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskId;
import org.opensearch.tasks.TaskInfo;
@@ -45,6 +47,7 @@
import java.io.IOException;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
import static org.opensearch.test.AbstractXContentTestCase.xContentTester;
@@ -57,7 +60,7 @@ public void testFromXContent() throws IOException {
)
.assertEqualsConsumer(this::assertEqualInstances)
.assertToXContentEquivalence(true)
- .randomFieldsExcludeFilter(field -> field.endsWith("headers") || field.endsWith("status"))
+ .randomFieldsExcludeFilter(field -> field.endsWith("headers") || field.endsWith("status") || field.contains("resource_stats"))
.test();
}
@@ -106,7 +109,8 @@ static TaskInfo randomTaskInfo() {
cancellable,
cancelled,
parentTaskId,
- headers
+ headers,
+ randomResourceStats()
);
}
@@ -127,4 +131,14 @@ private static RawTaskStatus randomRawTaskStatus() {
throw new IllegalStateException(e);
}
}
+
+ private static TaskResourceStats randomResourceStats() {
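+ // null roughly half the time, so both the absent and populated resource_stats cases are exercised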
+ return randomBoolean() ? null : new TaskResourceStats(new HashMap<String, TaskResourceUsage>() {
+ {
+ for (int i = 0; i < randomInt(5); i++) {
+ put(randomAlphaOfLength(5), new TaskResourceUsage(randomNonNegativeLong(), randomNonNegativeLong()));
+ }
+ }
+ });
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
index 552a3712eea40..26be36b7162f6 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java
@@ -96,7 +96,8 @@ protected CancelTasksResponseTests.ByNodeCancelTasksResponse createServerTestIns
cancellable,
cancelled,
new TaskId("node1", randomLong()),
- Collections.singletonMap("x-header-of", "some-value")
+ Collections.singletonMap("x-header-of", "some-value"),
+ null
)
);
}
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 5c1252061443a..01c186ed83fc2 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -89,7 +89,6 @@ thirdPartyAudit.ignoreMissingClasses(
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
- 'org.apache.log4j.Category',
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
diff --git a/client/rest/licenses/commons-logging-1.1.3.jar.sha1 b/client/rest/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/client/rest/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/client/rest/licenses/commons-logging-1.2.jar.sha1 b/client/rest/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/client/rest/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle
index bc4be1dd153e8..b7cb0d87c02d9 100644
--- a/client/sniffer/build.gradle
+++ b/client/sniffer/build.gradle
@@ -88,7 +88,6 @@ thirdPartyAudit.ignoreMissingClasses(
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
- 'org.apache.log4j.Category',
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
diff --git a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 b/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/client/sniffer/licenses/commons-logging-1.2.jar.sha1 b/client/sniffer/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/client/sniffer/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddFileKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddFileKeyStoreCommand.java
index 56c04e019bd5d..b948be24350f4 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddFileKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddFileKeyStoreCommand.java
@@ -95,7 +95,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment
keyStore.setFile(setting, Files.readAllBytes(file));
}
- keyStore.save(env.configFile(), getKeyStorePassword().getChars());
+ keyStore.save(env.configDir(), getKeyStorePassword().getChars());
}
@SuppressForbidden(reason = "file arg for cli")
diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddStringKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddStringKeyStoreCommand.java
index 88ed9f74fb690..a8bc1dff8838f 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddStringKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/AddStringKeyStoreCommand.java
@@ -121,7 +121,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment
}
}
- keyStore.save(env.configFile(), getKeyStorePassword().getChars());
+ keyStore.save(env.configDir(), getKeyStorePassword().getChars());
}
}
diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/CreateKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/CreateKeyStoreCommand.java
index b96dd46236b87..dbc5d897417ee 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/CreateKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/CreateKeyStoreCommand.java
@@ -59,7 +59,7 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand {
@Override
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
try (SecureString password = options.has(passwordOption) ? readPassword(terminal, true) : new SecureString(new char[0])) {
- Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile());
+ Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir());
if (Files.exists(keystoreFile)) {
if (terminal.promptYesNo("An opensearch keystore already exists. Overwrite?", false) == false) {
terminal.println("Exiting without creating keystore.");
@@ -67,8 +67,8 @@ protected void execute(Terminal terminal, OptionSet options, Environment env) th
}
}
KeyStoreWrapper keystore = KeyStoreWrapper.create();
- keystore.save(env.configFile(), password.getChars());
- terminal.println("Created opensearch keystore in " + KeyStoreWrapper.keystorePath(env.configFile()));
+ keystore.save(env.configDir(), password.getChars());
+ terminal.println("Created opensearch keystore in " + KeyStoreWrapper.keystorePath(env.configDir()));
} catch (SecurityException e) {
throw new UserException(ExitCodes.IO_ERROR, "Error creating the opensearch keystore.");
}
diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/RemoveSettingKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/RemoveSettingKeyStoreCommand.java
index 54f0c9324f5c0..c57959117af15 100644
--- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/RemoveSettingKeyStoreCommand.java
+++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/RemoveSettingKeyStoreCommand.java
@@ -66,6 +66,6 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment
}
keyStore.remove(setting);
}
- keyStore.save(env.configFile(), getKeyStorePassword().getChars());
+ keyStore.save(env.configDir(), getKeyStorePassword().getChars());
}
}
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/bootstrap/BootstrapTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/bootstrap/BootstrapTests.java
index 5b280f210af03..58beba16820c6 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/bootstrap/BootstrapTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/bootstrap/BootstrapTests.java
@@ -71,7 +71,7 @@ public void setupEnv() throws IOException {
}
public void testLoadSecureSettings() throws Exception {
- final Path configPath = env.configFile();
+ final Path configPath = env.configDir();
final SecureString seed;
try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) {
seed = KeyStoreWrapper.SEED_SETTING.get(Settings.builder().setSecureSettings(keyStoreWrapper).build());
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddFileKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddFileKeyStoreCommandTests.java
index 4cbf54fd11bf0..b3cc7e10fdf8c 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddFileKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddFileKeyStoreCommandTests.java
@@ -66,14 +66,14 @@ private Path createRandomFile() throws IOException {
for (int i = 0; i < length; ++i) {
bytes[i] = randomByte();
}
- Path file = env.configFile().resolve(randomAlphaOfLength(16));
+ Path file = env.configDir().resolve(randomAlphaOfLength(16));
Files.write(file, bytes);
return file;
}
private void addFile(KeyStoreWrapper keystore, String setting, Path file, String password) throws Exception {
keystore.setFile(setting, Files.readAllBytes(file));
- keystore.save(env.configFile(), password.toCharArray());
+ keystore.save(env.configDir(), password.toCharArray());
}
public void testMissingCreateWithEmptyPasswordWhenPrompted() throws Exception {
@@ -95,7 +95,7 @@ public void testMissingNoCreate() throws Exception {
terminal.addSecretInput(randomFrom("", "keystorepassword"));
terminal.addTextInput("n"); // explicit no
execute("foo");
- assertNull(KeyStoreWrapper.load(env.configFile()));
+ assertNull(KeyStoreWrapper.load(env.configDir()));
}
public void testOverwritePromptDefault() throws Exception {
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddStringKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddStringKeyStoreCommandTests.java
index b80e60925c2a6..059c74ed8971c 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddStringKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/AddStringKeyStoreCommandTests.java
@@ -101,7 +101,7 @@ public void testMissingPromptCreateWithoutPasswordWithoutPromptIfForced() throws
public void testMissingNoCreate() throws Exception {
terminal.addTextInput("n"); // explicit no
execute("foo");
- assertNull(KeyStoreWrapper.load(env.configFile()));
+ assertNull(KeyStoreWrapper.load(env.configDir()));
}
public void testOverwritePromptDefault() throws Exception {
@@ -161,7 +161,7 @@ public void testForceNonExistent() throws Exception {
public void testPromptForValue() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
terminal.addSecretInput("secret value");
execute("foo");
@@ -170,7 +170,7 @@ public void testPromptForValue() throws Exception {
public void testPromptForMultipleValues() throws Exception {
final String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
terminal.addSecretInput("bar1");
terminal.addSecretInput("bar2");
@@ -183,7 +183,7 @@ public void testPromptForMultipleValues() throws Exception {
public void testStdinShort() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("secret value 1");
execute("-x", "foo");
@@ -192,7 +192,7 @@ public void testStdinShort() throws Exception {
public void testStdinLong() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("secret value 2");
execute("--stdin", "foo");
@@ -201,7 +201,7 @@ public void testStdinLong() throws Exception {
public void testStdinNoInput() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("");
execute("-x", "foo");
@@ -210,7 +210,7 @@ public void testStdinNoInput() throws Exception {
public void testStdinInputWithLineBreaks() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("Typedthisandhitenter\n");
execute("-x", "foo");
@@ -219,7 +219,7 @@ public void testStdinInputWithLineBreaks() throws Exception {
public void testStdinInputWithCarriageReturn() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("Typedthisandhitenter\r");
execute("-x", "foo");
@@ -228,7 +228,7 @@ public void testStdinInputWithCarriageReturn() throws Exception {
public void testStdinWithMultipleValues() throws Exception {
final String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
setInput("bar1\nbar2\nbar3");
execute(randomFrom("-x", "--stdin"), "foo1", "foo2", "foo3");
@@ -239,7 +239,7 @@ public void testStdinWithMultipleValues() throws Exception {
public void testAddUtf8String() throws Exception {
String password = "keystorepassword";
- KeyStoreWrapper.create().save(env.configFile(), password.toCharArray());
+ KeyStoreWrapper.create().save(env.configDir(), password.toCharArray());
terminal.addSecretInput(password);
final int stringSize = randomIntBetween(8, 16);
try (CharArrayWriter secretChars = new CharArrayWriter(stringSize)) {
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/CreateKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/CreateKeyStoreCommandTests.java
index 7d07208de766e..11bfc26e2425c 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/CreateKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/CreateKeyStoreCommandTests.java
@@ -67,7 +67,7 @@ public void testNotMatchingPasswords() throws Exception {
public void testDefaultNotPromptForPassword() throws Exception {
execute();
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
@@ -76,7 +76,7 @@ public void testPosix() throws Exception {
terminal.addSecretInput(password);
terminal.addSecretInput(password);
execute();
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
@@ -86,13 +86,13 @@ public void testNotPosix() throws Exception {
terminal.addSecretInput(password);
env = setupEnv(false, fileSystems);
execute();
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
public void testOverwrite() throws Exception {
String password = randomFrom("", "keystorepassword");
- Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile());
+ Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir());
byte[] content = "not a keystore".getBytes(StandardCharsets.UTF_8);
Files.write(keystoreFile, content);
@@ -108,6 +108,6 @@ public void testOverwrite() throws Exception {
terminal.addSecretInput(password);
terminal.addSecretInput(password);
execute();
- assertNotNull(KeyStoreWrapper.load(env.configFile()));
+ assertNotNull(KeyStoreWrapper.load(env.configDir()));
}
}
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java
index aa31e07368fc2..32618923498ff 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java
@@ -92,12 +92,12 @@ KeyStoreWrapper createKeystore(String password, String... settings) throws Excep
for (int i = 0; i < settings.length; i += 2) {
keystore.setString(settings[i], settings[i + 1].toCharArray());
}
- keystore.save(env.configFile(), password.toCharArray());
+ keystore.save(env.configDir(), password.toCharArray());
return keystore;
}
KeyStoreWrapper loadKeystore(String password) throws Exception {
- KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile());
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configDir());
keystore.decrypt(password.toCharArray());
return keystore;
}
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java
index 2688e7637c9ba..70046c567b00e 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java
@@ -103,8 +103,8 @@ public void testFileSettingExhaustiveBytes() throws Exception {
bytes[i] = (byte) i;
}
keystore.setFile("foo", bytes);
- keystore.save(env.configFile(), new char[0]);
- keystore = KeyStoreWrapper.load(env.configFile());
+ keystore.save(env.configDir(), new char[0]);
+ keystore = KeyStoreWrapper.load(env.configDir());
keystore.decrypt(new char[0]);
try (InputStream stream = keystore.getFile("foo")) {
for (int i = 0; i < 256; ++i) {
@@ -125,11 +125,11 @@ public void testCreate() throws Exception {
public void testDecryptKeyStoreWithWrongPassword() throws Exception {
KeyStoreWrapper keystore = KeyStoreWrapper.create();
- keystore.save(env.configFile(), new char[0]);
- final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile());
+ keystore.save(env.configDir(), new char[0]);
+ final KeyStoreWrapper loadedKeystore = KeyStoreWrapper.load(env.configDir());
final SecurityException exception = expectThrows(
SecurityException.class,
- () -> loadedkeystore.decrypt(new char[] { 'i', 'n', 'v', 'a', 'l', 'i', 'd' })
+ () -> loadedKeystore.decrypt(new char[] { 'i', 'n', 'v', 'a', 'l', 'i', 'd' })
);
if (inFipsJvm()) {
assertThat(
@@ -183,17 +183,17 @@ public void testValueSHA256Digest() throws Exception {
public void testUpgradeNoop() throws Exception {
KeyStoreWrapper keystore = KeyStoreWrapper.create();
SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey());
- keystore.save(env.configFile(), new char[0]);
+ keystore.save(env.configDir(), new char[0]);
// upgrade does not overwrite seed
- KeyStoreWrapper.upgrade(keystore, env.configFile(), new char[0]);
+ KeyStoreWrapper.upgrade(keystore, env.configDir(), new char[0]);
assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
- keystore = KeyStoreWrapper.load(env.configFile());
+ keystore = KeyStoreWrapper.load(env.configDir());
keystore.decrypt(new char[0]);
assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
}
public void testFailWhenCannotConsumeSecretStream() throws Exception {
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput indexOutput = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(indexOutput, "opensearch.keystore", 3);
@@ -221,7 +221,7 @@ public void testFailWhenCannotConsumeSecretStream() throws Exception {
}
public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception {
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput indexOutput = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(indexOutput, "opensearch.keystore", 3);
@@ -250,7 +250,7 @@ public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception {
}
public void testFailWhenSecretStreamNotConsumed() throws Exception {
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput indexOutput = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(indexOutput, "opensearch.keystore", 3);
@@ -277,7 +277,7 @@ public void testFailWhenSecretStreamNotConsumed() throws Exception {
}
public void testFailWhenEncryptedBytesStreamIsNotConsumed() throws Exception {
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput indexOutput = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(indexOutput, "opensearch.keystore", 3);
@@ -343,11 +343,11 @@ private void possiblyAlterEncryptedBytes(
public void testUpgradeAddsSeed() throws Exception {
KeyStoreWrapper keystore = KeyStoreWrapper.create();
keystore.remove(KeyStoreWrapper.SEED_SETTING.getKey());
- keystore.save(env.configFile(), new char[0]);
- KeyStoreWrapper.upgrade(keystore, env.configFile(), new char[0]);
+ keystore.save(env.configDir(), new char[0]);
+ KeyStoreWrapper.upgrade(keystore, env.configDir(), new char[0]);
SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey());
assertNotNull(seed);
- keystore = KeyStoreWrapper.load(env.configFile());
+ keystore = KeyStoreWrapper.load(env.configDir());
keystore.decrypt(new char[0]);
assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
}
@@ -364,7 +364,7 @@ public void testIllegalSettingName() throws Exception {
public void testBackcompatV1() throws Exception {
assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm());
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, "opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(output, "opensearch.keystore", 1);
@@ -395,7 +395,7 @@ public void testBackcompatV1() throws Exception {
public void testBackcompatV2() throws Exception {
assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm());
- Path configDir = env.configFile();
+ Path configDir = env.configDir();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
byte[] fileBytes = new byte[20];
random().nextBytes(fileBytes);
@@ -457,10 +457,10 @@ public void testStringAndFileDistinction() throws Exception {
final Path temp = createTempDir();
Files.write(temp.resolve("file_setting"), "file_value".getBytes(StandardCharsets.UTF_8));
wrapper.setFile("file_setting", Files.readAllBytes(temp.resolve("file_setting")));
- wrapper.save(env.configFile(), new char[0]);
+ wrapper.save(env.configDir(), new char[0]);
wrapper.close();
- final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configFile());
+ final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configDir());
assertNotNull(afterSave);
afterSave.decrypt(new char[0]);
assertThat(afterSave.getSettingNames(), equalTo(new HashSet<>(Arrays.asList("keystore.seed", "string_setting", "file_setting"))));
diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/UpgradeKeyStoreCommandTests.java
index 8dd855ae6cf49..0fda83282c1f9 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/UpgradeKeyStoreCommandTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/UpgradeKeyStoreCommandTests.java
@@ -63,7 +63,7 @@ protected Environment createEnv(final Map settings) {
@AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/468")
public void testKeystoreUpgrade() throws Exception {
- final Path keystore = KeyStoreWrapper.keystorePath(env.configFile());
+ final Path keystore = KeyStoreWrapper.keystorePath(env.configDir());
try (
InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v3-opensearch.keystore");
OutputStream os = Files.newOutputStream(keystore)
@@ -74,12 +74,12 @@ public void testKeystoreUpgrade() throws Exception {
os.write(buffer, 0, read);
}
}
- try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configFile())) {
+ try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configDir())) {
assertNotNull(beforeUpgrade);
assertThat(beforeUpgrade.getFormatVersion(), equalTo(3));
}
execute();
- try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) {
+ try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configDir())) {
assertNotNull(afterUpgrade);
assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.FORMAT_VERSION));
afterUpgrade.decrypt(new char[0]);
@@ -89,7 +89,7 @@ public void testKeystoreUpgrade() throws Exception {
public void testKeystoreDoesNotExist() {
final UserException e = expectThrows(UserException.class, this::execute);
- assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]")));
+ assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configDir()) + "]")));
}
}
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
index 8acf137043a92..86b44799eba68 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
@@ -269,8 +269,8 @@ void execute(Terminal terminal, List<String> pluginIds, boolean isBatch, Environ
final List<Path> deleteOnFailure = new ArrayList<>();
deleteOnFailures.put(pluginId, deleteOnFailure);
- final Path pluginZip = download(terminal, pluginId, env.tmpFile(), isBatch);
- final Path extractedZip = unzip(pluginZip, env.pluginsFile());
+ final Path pluginZip = download(terminal, pluginId, env.tmpDir(), isBatch);
+ final Path extractedZip = unzip(pluginZip, env.pluginsDir());
deleteOnFailure.add(extractedZip);
final PluginInfo pluginInfo = installPlugin(terminal, isBatch, extractedZip, env, deleteOnFailure);
terminal.println("-> Installed " + pluginInfo.getName() + " with folder name " + pluginInfo.getTargetFolderName());
@@ -815,14 +815,14 @@ private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, Environmen
PluginsService.verifyCompatibility(info);
// checking for existing version of the plugin
- verifyPluginName(env.pluginsFile(), info.getName());
+ verifyPluginName(env.pluginsDir(), info.getName());
- PluginsService.checkForFailedPluginRemovals(env.pluginsFile());
+ PluginsService.checkForFailedPluginRemovals(env.pluginsDir());
terminal.println(VERBOSE, info.toString());
// check for jar hell before any copying
- jarHellCheck(info, pluginRoot, env.pluginsFile(), env.modulesFile());
+ jarHellCheck(info, pluginRoot, env.pluginsDir(), env.modulesDir());
return info;
}
@@ -872,21 +872,21 @@ private PluginInfo installPlugin(Terminal terminal, boolean isBatch, Path tmpRoo
Path policy = tmpRoot.resolve(PluginInfo.OPENSEARCH_PLUGIN_POLICY);
final Set<String> permissions;
if (Files.exists(policy)) {
- permissions = PluginSecurity.parsePermissions(policy, env.tmpFile());
+ permissions = PluginSecurity.parsePermissions(policy, env.tmpDir());
} else {
permissions = Collections.emptySet();
}
PluginSecurity.confirmPolicyExceptions(terminal, permissions, isBatch);
String targetFolderName = info.getTargetFolderName();
- final Path destination = env.pluginsFile().resolve(targetFolderName);
+ final Path destination = env.pluginsDir().resolve(targetFolderName);
deleteOnFailure.add(destination);
installPluginSupportFiles(
info,
tmpRoot,
- env.binFile().resolve(targetFolderName),
- env.configFile().resolve(targetFolderName),
+ env.binDir().resolve(targetFolderName),
+ env.configDir().resolve(targetFolderName),
deleteOnFailure
);
movePlugin(tmpRoot, destination);
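The keystore-cli and plugin-cli changes in this diff are one mechanical rename: `Environment`'s directory accessors drop the misleading `*File()` names in favor of `*Dir()`. A sketch collecting the renamed getters as they appear at the call sites above (only those visible in this diff, not the full `Environment` API):

```java
import java.nio.file.Path;

import org.opensearch.env.Environment;

final class EnvironmentDirsSketch {
    static void print(Environment env) {
        Path config = env.configDir();   // was env.configFile()
        Path plugins = env.pluginsDir(); // was env.pluginsFile()
        Path bin = env.binDir();         // was env.binFile()
        Path modules = env.modulesDir(); // was env.modulesFile()
        Path tmp = env.tmpDir();         // was env.tmpFile()
        System.out.printf("%s %s %s %s %s%n", config, plugins, bin, modules, tmp);
    }
}
```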
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
index ecf702c4675de..d269603656114 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
@@ -57,13 +57,13 @@ class ListPluginsCommand extends EnvironmentAwareCommand {
@Override
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
- if (Files.exists(env.pluginsFile()) == false) {
- throw new IOException("Plugins directory missing: " + env.pluginsFile());
+ if (Files.exists(env.pluginsDir()) == false) {
+ throw new IOException("Plugins directory missing: " + env.pluginsDir());
}
- terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsFile());
+ terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsDir());
final List<Path> plugins = new ArrayList<>();
- try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) {
+ try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) {
for (Path plugin : paths) {
plugins.add(plugin);
}
@@ -75,7 +75,7 @@ protected void execute(Terminal terminal, OptionSet options, Environment env) th
}
private void printPlugin(Environment env, Terminal terminal, Path plugin, String prefix) throws IOException {
- PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin));
+ PluginInfo info = PluginInfo.readFromProperties(env.pluginsDir().resolve(plugin));
terminal.println(Terminal.Verbosity.SILENT, prefix + info.getName());
terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix));
if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) {
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/RemovePluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/RemovePluginCommand.java
index fb567e6609ba9..8fc98e5e14607 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/RemovePluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/RemovePluginCommand.java
@@ -99,7 +99,7 @@ void execute(Terminal terminal, Environment env, String pluginName, boolean purg
// first make sure nothing extends this plugin
List<String> usedBy = new ArrayList<>();
- Set<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(env.pluginsFile());
+ Set<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(env.pluginsDir());
for (PluginsService.Bundle bundle : bundles) {
for (String extendedPlugin : bundle.plugin.getExtendedPlugins()) {
if (extendedPlugin.equals(pluginName)) {
@@ -114,9 +114,9 @@ void execute(Terminal terminal, Environment env, String pluginName, boolean purg
);
}
- Path pluginDir = env.pluginsFile().resolve(pluginName);
- Path pluginConfigDir = env.configFile().resolve(pluginName);
- Path removing = env.pluginsFile().resolve(".removing-" + pluginName);
+ Path pluginDir = env.pluginsDir().resolve(pluginName);
+ Path pluginConfigDir = env.configDir().resolve(pluginName);
+ Path removing = env.pluginsDir().resolve(".removing-" + pluginName);
/*
* If the plugin directory is not found with the plugin name, scan the list of all installed plugins
@@ -124,9 +124,9 @@ void execute(Terminal terminal, Environment env, String pluginName, boolean purg
*/
if (!Files.exists(pluginDir)) {
terminal.println("searching in other folders to find if plugin exists with custom folder name");
- pluginDir = PluginHelper.verifyIfPluginExists(env.pluginsFile(), pluginName);
- pluginConfigDir = env.configFile().resolve(pluginDir.getFileName());
- removing = env.pluginsFile().resolve(".removing-" + pluginDir.getFileName());
+ pluginDir = PluginHelper.verifyIfPluginExists(env.pluginsDir(), pluginName);
+ pluginConfigDir = env.configDir().resolve(pluginDir.getFileName());
+ removing = env.pluginsDir().resolve(".removing-" + pluginDir.getFileName());
}
terminal.println("-> removing [" + pluginName + "]...");
@@ -158,7 +158,7 @@ void execute(Terminal terminal, Environment env, String pluginName, boolean purg
terminal.println(VERBOSE, "removing [" + pluginDir + "]");
}
- final Path pluginBinDir = env.binFile().resolve(pluginName);
+ final Path pluginBinDir = env.binDir().resolve(pluginName);
if (Files.exists(pluginBinDir)) {
if (!Files.isDirectory(pluginBinDir)) {
throw new UserException(ExitCodes.IO_ERROR, "bin dir for " + pluginName + " is not a directory");
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
index c1b4568759f4d..e4f477d78c16b 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
@@ -317,7 +317,7 @@ void installPlugins(final List<String> pluginUrls, final Path home, final Instal
}
void assertPlugin(String name, Path original, Environment env) throws IOException {
- assertPluginInternal(name, env.pluginsFile(), original);
+ assertPluginInternal(name, env.pluginsDir(), original);
assertConfigAndBin(name, original, env);
assertInstallCleaned(env);
}
@@ -353,12 +353,12 @@ void assertPluginInternal(String name, Path pluginsFile, Path originalPlugin) th
void assertConfigAndBin(String name, Path original, Environment env) throws IOException {
if (Files.exists(original.resolve("bin"))) {
- Path binDir = env.binFile().resolve(name);
+ Path binDir = env.binDir().resolve(name);
assertTrue("bin dir exists", Files.exists(binDir));
assertTrue("bin is a dir", Files.isDirectory(binDir));
PosixFileAttributes binAttributes = null;
if (isPosix) {
- binAttributes = Files.readAttributes(env.binFile(), PosixFileAttributes.class);
+ binAttributes = Files.readAttributes(env.binDir(), PosixFileAttributes.class);
}
 try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) {
for (Path file : stream) {
@@ -371,7 +371,7 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx
}
}
if (Files.exists(original.resolve("config"))) {
- Path configDir = env.configFile().resolve(name);
+ Path configDir = env.configDir().resolve(name);
assertTrue("config dir exists", Files.exists(configDir));
assertTrue("config is a dir", Files.isDirectory(configDir));
@@ -379,7 +379,7 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx
GroupPrincipal group = null;
if (isPosix) {
- PosixFileAttributes configAttributes = Files.getFileAttributeView(env.configFile(), PosixFileAttributeView.class)
+ PosixFileAttributes configAttributes = Files.getFileAttributeView(env.configDir(), PosixFileAttributeView.class)
.readAttributes();
user = configAttributes.owner();
group = configAttributes.group();
@@ -408,7 +408,7 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx
}
void assertInstallCleaned(Environment env) throws IOException {
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsDir())) {
for (Path file : stream) {
if (file.getFileName().toString().startsWith(".installing")) {
fail("Installation dir still exists, " + file);
@@ -458,7 +458,7 @@ public void testTransaction() throws Exception {
() -> installPlugins(Arrays.asList(pluginZip, pluginZip + "does-not-exist"), env.v1())
);
assertThat(e, hasToString(containsString("does-not-exist")));
- final Path fakeInstallPath = env.v2().pluginsFile().resolve("fake");
+ final Path fakeInstallPath = env.v2().pluginsDir().resolve("fake");
// fake should have been removed when the file not found exception occurred
assertFalse(Files.exists(fakeInstallPath));
assertInstallCleaned(env.v2());
@@ -468,7 +468,7 @@ public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception {
 Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir);
- final Path removing = env.v2().pluginsFile().resolve(".removing-failed");
+ final Path removing = env.v2().pluginsDir().resolve(".removing-failed");
Files.createDirectory(removing);
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1()));
final String expected = String.format(
@@ -520,11 +520,11 @@ public void testPluginsDirReadOnly() throws Exception {
assumeTrue("posix and filesystem", isPosix && isReal);
 Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
- try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) {
+ try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsDir())) {
pluginsAttrs.setPermissions(new HashSet<>());
String pluginZip = createPluginUrl("fake", pluginDir);
IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1()));
- assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString()));
+ assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsDir().toString()));
}
assertInstallCleaned(env.v2());
}
@@ -629,7 +629,7 @@ public void testBinConflict() throws Exception {
Files.createFile(binDir.resolve("somescript"));
String pluginZip = createPluginUrl("opensearch", pluginDir);
FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1()));
- assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binFile().resolve("opensearch").toString()));
+ assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binDir().resolve("opensearch").toString()));
assertInstallCleaned(env.v2());
}
@@ -641,7 +641,7 @@ public void testBinPermissions() throws Exception {
Files.createDirectory(binDir);
Files.createFile(binDir.resolve("somescript"));
String pluginZip = createPluginUrl("fake", pluginDir);
- try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) {
+ try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binDir())) {
 Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
// make sure at least one execute perm is missing, so we know we forced it during installation
perms.remove(PosixFilePermission.GROUP_EXECUTE);
@@ -672,7 +672,7 @@ public void testPluginPermissions() throws Exception {
installPlugin(pluginZip, env.v1());
assertPlugin("fake", pluginDir, env.v2());
- final Path fake = env.v2().pluginsFile().resolve("fake");
+ final Path fake = env.v2().pluginsDir().resolve("fake");
final Path resources = fake.resolve("resources");
final Path platform = fake.resolve("platform");
final Path platformName = platform.resolve("linux-x64");
@@ -725,7 +725,7 @@ public void testConfig() throws Exception {
public void testExistingConfig() throws Exception {
 Tuple<Path, Environment> env = createEnv(fs, temp);
- Path envConfigDir = env.v2().configFile().resolve("fake");
+ Path envConfigDir = env.v2().configDir().resolve("fake");
Files.createDirectories(envConfigDir);
Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8));
Path pluginDir = createPluginDir(temp);
@@ -902,7 +902,7 @@ public void testPluginAlreadyInstalled() throws Exception {
e.getMessage(),
equalTo(
"plugin directory ["
- + env.v2().pluginsFile().resolve("fake")
+ + env.v2().pluginsDir().resolve("fake")
+ "] already exists; "
+ "if you need to update the plugin, uninstall it first using command 'remove fake'"
)
@@ -1493,7 +1493,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> env, String plugi
assertEquals("installation aborted by user", e.getMessage());
assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning));
- try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
+ try (Stream<Path> fileStream = Files.list(env.v2().pluginsDir())) {
assertThat(fileStream.collect(Collectors.toList()), empty());
}
@@ -1506,7 +1506,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> env, String plugi
e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertEquals("installation aborted by user", e.getMessage());
assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning));
- try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
+ try (Stream<Path> fileStream = Files.list(env.v2().pluginsDir())) {
assertThat(fileStream.collect(Collectors.toList()), empty());
}
}
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
index 376e470159731..d84f36d818046 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
@@ -111,7 +111,7 @@ private static void buildFakePlugin(
final boolean hasNativeController
) throws IOException {
PluginTestUtil.writePluginProperties(
- env.pluginsFile().resolve(name),
+ env.pluginsDir().resolve(name),
"description",
description,
"name",
@@ -132,9 +132,9 @@ private static void buildFakePlugin(
}
public void testPluginsDirMissing() throws Exception {
- Files.delete(env.pluginsFile());
+ Files.delete(env.pluginsDir());
IOException e = expectThrows(IOException.class, () -> listPlugins(home));
- assertEquals("Plugins directory missing: " + env.pluginsFile(), e.getMessage());
+ assertEquals("Plugins directory missing: " + env.pluginsDir(), e.getMessage());
}
public void testNoPlugins() throws Exception {
@@ -161,7 +161,7 @@ public void testPluginWithVerbose() throws Exception {
MockTerminal terminal = listPlugins(home, params);
assertEquals(
buildMultiline(
- "Plugins directory: " + env.pluginsFile(),
+ "Plugins directory: " + env.pluginsDir(),
"fake_plugin",
"- Plugin information:",
"Name: fake_plugin",
@@ -184,7 +184,7 @@ public void testPluginWithNativeController() throws Exception {
MockTerminal terminal = listPlugins(home, params);
assertEquals(
buildMultiline(
- "Plugins directory: " + env.pluginsFile(),
+ "Plugins directory: " + env.pluginsDir(),
"fake_plugin1",
"- Plugin information:",
"Name: fake_plugin1",
@@ -208,7 +208,7 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception {
MockTerminal terminal = listPlugins(home, params);
assertEquals(
buildMultiline(
- "Plugins directory: " + env.pluginsFile(),
+ "Plugins directory: " + env.pluginsDir(),
"fake_plugin1",
"- Plugin information:",
"Name: fake_plugin1",
@@ -245,14 +245,14 @@ public void testPluginWithoutVerboseMultiplePlugins() throws Exception {
}
public void testPluginWithoutDescriptorFile() throws Exception {
- final Path pluginDir = env.pluginsFile().resolve("fake1");
+ final Path pluginDir = env.pluginsDir().resolve("fake1");
Files.createDirectories(pluginDir);
NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(home));
assertEquals(pluginDir.resolve(PluginInfo.OPENSEARCH_PLUGIN_PROPERTIES).toString(), e.getFile());
}
public void testPluginWithWrongDescriptorFile() throws Exception {
- final Path pluginDir = env.pluginsFile().resolve("fake1");
+ final Path pluginDir = env.pluginsDir().resolve("fake1");
PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(home));
final Path descriptorPath = pluginDir.resolve(PluginInfo.OPENSEARCH_PLUGIN_PROPERTIES);
@@ -261,7 +261,7 @@ public void testPluginWithWrongDescriptorFile() throws Exception {
public void testExistingIncompatiblePlugin() throws Exception {
PluginTestUtil.writePluginProperties(
- env.pluginsFile().resolve("fake_plugin1"),
+ env.pluginsDir().resolve("fake_plugin1"),
"description",
"fake desc 1",
"name",
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java
index 8f9aa27be7e84..ab23dfad75683 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java
@@ -93,11 +93,11 @@ public void setUp() throws Exception {
}
void createPlugin(String name, String... additionalProps) throws IOException {
- createPlugin(env.pluginsFile(), name, Version.CURRENT, additionalProps);
+ createPlugin(env.pluginsDir(), name, Version.CURRENT, additionalProps);
}
void createPlugin(String name, Version version) throws IOException {
- createPlugin(env.pluginsFile(), name, version);
+ createPlugin(env.pluginsDir(), name, version);
}
void createPlugin(Path path, String name, Version version, String... additionalProps) throws IOException {
@@ -130,7 +130,7 @@ static MockTerminal removePlugin(String name, Path home, boolean purge) throws E
}
static void assertRemoveCleaned(Environment env) throws IOException {
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsDir())) {
for (Path file : stream) {
if (file.getFileName().toString().startsWith(".removing")) {
fail("Removal dir still exists, " + file);
@@ -147,23 +147,23 @@ public void testMissing() throws Exception {
public void testBasic() throws Exception {
createPlugin("fake");
- Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar"));
- Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir"));
+ Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar"));
+ Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir"));
createPlugin("other");
removePlugin("fake", home, randomBoolean());
- assertFalse(Files.exists(env.pluginsFile().resolve("fake")));
- assertTrue(Files.exists(env.pluginsFile().resolve("other")));
+ assertFalse(Files.exists(env.pluginsDir().resolve("fake")));
+ assertTrue(Files.exists(env.pluginsDir().resolve("other")));
assertRemoveCleaned(env);
}
public void testRemovePluginWithCustomFolderName() throws Exception {
createPlugin("fake", "custom.foldername", "custom-folder");
- Files.createFile(env.pluginsFile().resolve("custom-folder").resolve("plugin.jar"));
- Files.createDirectory(env.pluginsFile().resolve("custom-folder").resolve("subdir"));
+ Files.createFile(env.pluginsDir().resolve("custom-folder").resolve("plugin.jar"));
+ Files.createDirectory(env.pluginsDir().resolve("custom-folder").resolve("subdir"));
createPlugin("other");
removePlugin("fake", home, randomBoolean());
- assertFalse(Files.exists(env.pluginsFile().resolve("custom-folder")));
- assertTrue(Files.exists(env.pluginsFile().resolve("other")));
+ assertFalse(Files.exists(env.pluginsDir().resolve("custom-folder")));
+ assertTrue(Files.exists(env.pluginsDir().resolve("other")));
assertRemoveCleaned(env);
}
@@ -177,62 +177,62 @@ public void testRemoveOldVersion() throws Exception {
)
);
removePlugin("fake", home, randomBoolean());
- assertThat(Files.exists(env.pluginsFile().resolve("fake")), equalTo(false));
+ assertThat(Files.exists(env.pluginsDir().resolve("fake")), equalTo(false));
assertRemoveCleaned(env);
}
public void testBin() throws Exception {
createPlugin("fake");
- Path binDir = env.binFile().resolve("fake");
+ Path binDir = env.binDir().resolve("fake");
Files.createDirectories(binDir);
Files.createFile(binDir.resolve("somescript"));
removePlugin("fake", home, randomBoolean());
- assertFalse(Files.exists(env.pluginsFile().resolve("fake")));
- assertTrue(Files.exists(env.binFile().resolve("opensearch")));
+ assertFalse(Files.exists(env.pluginsDir().resolve("fake")));
+ assertTrue(Files.exists(env.binDir().resolve("opensearch")));
assertFalse(Files.exists(binDir));
assertRemoveCleaned(env);
}
public void testBinNotDir() throws Exception {
createPlugin("fake");
- Files.createFile(env.binFile().resolve("fake"));
+ Files.createFile(env.binDir().resolve("fake"));
UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
- assertTrue(Files.exists(env.pluginsFile().resolve("fake"))); // did not remove
- assertTrue(Files.exists(env.binFile().resolve("fake")));
+ assertTrue(Files.exists(env.pluginsDir().resolve("fake"))); // did not remove
+ assertTrue(Files.exists(env.binDir().resolve("fake")));
assertRemoveCleaned(env);
}
public void testConfigDirPreserved() throws Exception {
createPlugin("fake");
- final Path configDir = env.configFile().resolve("fake");
+ final Path configDir = env.configDir().resolve("fake");
Files.createDirectories(configDir);
Files.createFile(configDir.resolve("fake.yml"));
final MockTerminal terminal = removePlugin("fake", home, false);
- assertTrue(Files.exists(env.configFile().resolve("fake")));
+ assertTrue(Files.exists(env.configDir().resolve("fake")));
assertThat(terminal.getOutput(), containsString(expectedConfigDirPreservedMessage(configDir)));
assertRemoveCleaned(env);
}
public void testPurgePluginExists() throws Exception {
createPlugin("fake");
- final Path configDir = env.configFile().resolve("fake");
+ final Path configDir = env.configDir().resolve("fake");
if (randomBoolean()) {
Files.createDirectories(configDir);
Files.createFile(configDir.resolve("fake.yml"));
}
final MockTerminal terminal = removePlugin("fake", home, true);
- assertFalse(Files.exists(env.configFile().resolve("fake")));
+ assertFalse(Files.exists(env.configDir().resolve("fake")));
assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
assertRemoveCleaned(env);
}
public void testPurgePluginDoesNotExist() throws Exception {
- final Path configDir = env.configFile().resolve("fake");
+ final Path configDir = env.configDir().resolve("fake");
Files.createDirectories(configDir);
Files.createFile(configDir.resolve("fake.yml"));
final MockTerminal terminal = removePlugin("fake", home, true);
- assertFalse(Files.exists(env.configFile().resolve("fake")));
+ assertFalse(Files.exists(env.configDir().resolve("fake")));
assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
assertRemoveCleaned(env);
}
@@ -243,8 +243,8 @@ public void testPurgeNothingExists() throws Exception {
}
public void testPurgeOnlyMarkerFileExists() throws Exception {
- final Path configDir = env.configFile().resolve("fake");
- final Path removing = env.pluginsFile().resolve(".removing-fake");
+ final Path configDir = env.configDir().resolve("fake");
+ final Path removing = env.pluginsDir().resolve(".removing-fake");
Files.createFile(removing);
final MockTerminal terminal = removePlugin("fake", home, randomBoolean());
assertFalse(Files.exists(removing));
@@ -253,7 +253,7 @@ public void testPurgeOnlyMarkerFileExists() throws Exception {
public void testNoConfigDirPreserved() throws Exception {
createPlugin("fake");
- final Path configDir = env.configFile().resolve("fake");
+ final Path configDir = env.configDir().resolve("fake");
final MockTerminal terminal = removePlugin("fake", home, randomBoolean());
assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
}
@@ -293,8 +293,8 @@ public void testMissingPluginName() throws Exception {
public void testRemoveWhenRemovingMarker() throws Exception {
createPlugin("fake");
- Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar"));
- Files.createFile(env.pluginsFile().resolve(".removing-fake"));
+ Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar"));
+ Files.createFile(env.pluginsDir().resolve(".removing-fake"));
removePlugin("fake", home, randomBoolean());
}
diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle
index 0e1996f3d68fa..d29c808562168 100644
--- a/distribution/tools/upgrade-cli/build.gradle
+++ b/distribution/tools/upgrade-cli/build.gradle
@@ -15,7 +15,7 @@ dependencies {
compileOnly project(":server")
compileOnly project(":libs:opensearch-cli")
implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
- implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testImplementation project(":test:framework")
testImplementation 'com.google.jimfs:jimfs:1.2'
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/TaskInput.java b/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/TaskInput.java
index dde84f3f0ebe8..2fbd5d9a0fa7c 100644
--- a/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/TaskInput.java
+++ b/distribution/tools/upgrade-cli/src/main/java/org/opensearch/upgrade/TaskInput.java
@@ -83,11 +83,11 @@ public void setEsHome(Path esHome) {
}
public Path getOpenSearchConfig() {
- return openSearchEnv.configFile();
+ return openSearchEnv.configDir();
}
public Path getOpenSearchBin() {
- return openSearchEnv.binFile();
+ return openSearchEnv.binDir();
}
public boolean isRunning() {
diff --git a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/UpgradeCliTests.java b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/UpgradeCliTests.java
index 3db782925a660..39fa2cef24bb1 100644
--- a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/UpgradeCliTests.java
+++ b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/UpgradeCliTests.java
@@ -121,7 +121,7 @@ private void assertYmlConfigImported() throws IOException {
"path.logs: \"/var/log/eslogs\""
)
);
- List<String> actualSettings = Files.readAllLines(env.configFile().resolve("opensearch.yml"))
+ List<String> actualSettings = Files.readAllLines(env.configDir().resolve("opensearch.yml"))
.stream()
.filter(Objects::nonNull)
.filter(line -> !line.isEmpty())
@@ -132,7 +132,7 @@ private void assertYmlConfigImported() throws IOException {
private void assertKeystoreImported(String passwd) throws IOException, GeneralSecurityException {
// assert keystore is created
- KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile());
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configDir());
assertNotNull(keystore);
// assert all keystore settings are imported
@@ -148,13 +148,13 @@ private void assertKeystoreImported(String passwd) throws IOException, GeneralSe
}
private void assertJvmOptionsImported() throws IOException, GeneralSecurityException {
- Path path = env.configFile().resolve("jvm.options.d");
+ Path path = env.configDir().resolve("jvm.options.d");
assertThat(Files.exists(path), is(true));
assertThat(Files.isDirectory(path), is(true));
assertThat(Files.exists(path.resolve("test.options")), is(true));
}
private void assertLog4jPropertiesImported() throws IOException, GeneralSecurityException {
- assertThat(Files.exists(env.configFile().resolve("log4j2.properties")), is(true));
+ assertThat(Files.exists(env.configDir().resolve("log4j2.properties")), is(true));
}
}
diff --git a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java
index f72e49d5961bf..07cb19b132f31 100644
--- a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java
+++ b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java
@@ -64,6 +64,6 @@ public void testGetSummaryFields() {
assertThat(summary.get("Elasticsearch Version"), is("7.10.2"));
assertThat(summary.get("Elasticsearch Plugins"), is("[plugin-1, plugin-2]"));
assertThat(summary.get("Elasticsearch Config"), is("es_home"));
- assertThat(summary.get("OpenSearch Config"), is(env.configFile().toString()));
+ assertThat(summary.get("OpenSearch Config"), is(env.configDir().toString()));
}
}
diff --git a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle
index df47a3796c825..05531487f35f3 100644
--- a/gradle/missing-javadoc.gradle
+++ b/gradle/missing-javadoc.gradle
@@ -7,6 +7,8 @@
*/
import javax.annotation.Nullable
+import org.gradle.api.tasks.PathSensitive;
+import org.gradle.api.tasks.PathSensitivity;
import org.gradle.internal.jvm.Jvm
/**
@@ -178,6 +180,8 @@ configure([
class MissingJavadocTask extends DefaultTask {
@InputFiles
@SkipWhenEmpty
+ @IgnoreEmptyDirectories
+ @PathSensitive(PathSensitivity.RELATIVE)
SourceDirectorySet srcDirSet;
@OutputDirectory
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index daf75f8e132cb..a8e09684f1fd3 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879
+distributionSha256Sum=e6d864e3b5bc05cc62041842b306383fc1fefcec359e70cebb1d470a6094ca82
diff --git a/jenkins/jenkinsfile b/jenkins/jenkinsfile
new file mode 100644
index 0000000000000..96973fceea765
--- /dev/null
+++ b/jenkins/jenkinsfile
@@ -0,0 +1,32 @@
+pipeline {
+ agent {
+ docker {
+ label 'AL2-X64'
+ /* See
+ https://hub.docker.com/layers/ci-runner/opensearchstaging/ci-runner/ci-runner-ubuntu1804-build-v1/images/sha256-2c7bb2780bc08cd4e7e3c382ac53db414754dabd52f9b70e1c7e344dfb9a0e5e?context=explore
+ for docker image
+ */
+ image 'opensearchstaging/ci-runner:ci-runner-ubuntu1804-build-v1'
+ alwaysPull true
+ }
+ }
+
+ environment {
+ JAVA11_HOME="/opt/java/openjdk-11"
+ JAVA14_HOME="/opt/java/openjdk-14"
+ JAVA17_HOME="/opt/java/openjdk-17"
+ JAVA8_HOME="/opt/java/openjdk-8"
+ JAVA_HOME="/opt/java/openjdk-17"
+ }
+
+ stages {
+ stage('gradle-check') {
+ steps {
+ script {
+ sh 'echo gradle check'
+ sh './gradlew check --no-daemon --no-scan'
+ }
+ }
+ }
+ }
+}
diff --git a/libs/dissect/build.gradle b/libs/dissect/build.gradle
index 0f0b8407e7e6b..47f7970ea5ac0 100644
--- a/libs/dissect/build.gradle
+++ b/libs/dissect/build.gradle
@@ -34,7 +34,7 @@ dependencies {
}
testImplementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
testImplementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
- testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
}
tasks.named('forbiddenApisMain').configure {
diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle
index ce23406721fe6..86414d18108a1 100644
--- a/libs/grok/build.gradle
+++ b/libs/grok/build.gradle
@@ -29,9 +29,9 @@
*/
dependencies {
- api 'org.jruby.joni:joni:2.1.41'
+ api 'org.jruby.joni:joni:2.1.43'
// joni dependencies:
- api 'org.jruby.jcodings:jcodings:1.0.44'
+ api 'org.jruby.jcodings:jcodings:1.0.57'
testImplementation(project(":test:framework")) {
exclude group: 'org.opensearch', module: 'opensearch-grok'
@@ -41,7 +41,3 @@ dependencies {
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures'
}
-
-thirdPartyAudit.ignoreMissingClasses(
- 'org.jcodings.unicode.UnicodeCodeRange'
-)
\ No newline at end of file
diff --git a/libs/grok/licenses/jcodings-1.0.44.jar.sha1 b/libs/grok/licenses/jcodings-1.0.44.jar.sha1
deleted file mode 100644
index 4449009d3395e..0000000000000
--- a/libs/grok/licenses/jcodings-1.0.44.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a6884b2fd8fd9a56874db05afaa22435043a2e3e
\ No newline at end of file
diff --git a/libs/grok/licenses/jcodings-1.0.57.jar.sha1 b/libs/grok/licenses/jcodings-1.0.57.jar.sha1
new file mode 100644
index 0000000000000..1a703c2644787
--- /dev/null
+++ b/libs/grok/licenses/jcodings-1.0.57.jar.sha1
@@ -0,0 +1 @@
+603a9ceac39cbf7f6f27fe18b2fded4714319b0a
\ No newline at end of file
diff --git a/libs/grok/licenses/joni-2.1.41.jar.sha1 b/libs/grok/licenses/joni-2.1.41.jar.sha1
deleted file mode 100644
index 4f0a0a8393dd0..0000000000000
--- a/libs/grok/licenses/joni-2.1.41.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4a35f4eaef792073bc081b756b1f4949879cd41e
\ No newline at end of file
diff --git a/libs/grok/licenses/joni-2.1.43.jar.sha1 b/libs/grok/licenses/joni-2.1.43.jar.sha1
new file mode 100644
index 0000000000000..ef5dfabb2b391
--- /dev/null
+++ b/libs/grok/licenses/joni-2.1.43.jar.sha1
@@ -0,0 +1 @@
+9a3bf154469d5ff1d1107755904279081a5fb618
\ No newline at end of file
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java
index 875c5261f8387..b46bef3e6c563 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java
@@ -61,7 +61,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW
throw new IllegalArgumentException("hyphenation_patterns_path is a required setting.");
}
- Path hyphenationPatternsFile = env.configFile().resolve(hyphenationPatternsPath);
+ Path hyphenationPatternsFile = env.configDir().resolve(hyphenationPatternsPath);
try {
InputStream in = Files.newInputStream(hyphenationPatternsFile);
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
index 509010e209088..eaf571e7469d6 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
@@ -13,10 +13,7 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
-import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.analysis.AnalysisTestsHelper;
@@ -24,7 +21,6 @@
import org.opensearch.index.analysis.TokenFilterFactory;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.OpenSearchTokenStreamTestCase;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.io.StringReader;
@@ -62,61 +58,6 @@ public void testTokenizerCustomizedSeparator() throws IOException {
assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShot+Is+AweSome" });
}
- public void testOldLuceneVersionSeparator() throws IOException {
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
- Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2)
- )
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
- .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") // this will be ignored
- .build(),
- new CommonAnalysisPlugin()
- );
-
- TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
- String source = "PowerShot Is AweSome";
- Tokenizer tokenizer = new WhitespaceTokenizer();
- tokenizer.setReader(new StringReader(source));
-
- // earlier Lucene version will only use Lucene's default separator
- assertTokenStreamContents(
- tokenFilter.create(tokenizer),
- new String[] {
- "PowerShot"
- + ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR
- + "Is"
- + ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR
- + "AweSome" }
- );
- }
-
- public void testOldLuceneVersionNoSeparator() throws IOException {
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
- Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2)
- )
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
- .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") // this will be ignored
- .put("index.analysis.filter.my_concatenate_graph.preserve_separator", "false")
- .build(),
- new CommonAnalysisPlugin()
- );
-
- TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
- String source = "PowerShot Is AweSome";
- Tokenizer tokenizer = new WhitespaceTokenizer();
- tokenizer.setReader(new StringReader(source));
-
- // earlier Lucene version will not add separator if preserve_separator is false
- assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShotIsAweSome" });
- }
-
public void testTokenizerEmptySeparator() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
Settings.builder()
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
index 07ac0d69428af..e77f895d05661 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
@@ -33,7 +33,6 @@
package org.opensearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@@ -68,34 +67,6 @@ private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws
public void testPreConfiguredTokenizer() throws IOException {
- // Before 7.3 we return ngrams of length 1 only
- {
- Version version = VersionUtils.randomVersionBetween(
- random(),
- LegacyESVersion.fromString("7.0.0"),
- VersionUtils.getPreviousVersion(LegacyESVersion.fromString("7.3.0"))
- );
- try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) {
- NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
- assertNotNull(analyzer);
- assertAnalyzesTo(analyzer, "test", new String[] { "t" });
- }
- }
-
- // Check deprecated name as well
- {
- Version version = VersionUtils.randomVersionBetween(
- random(),
- LegacyESVersion.fromString("7.0.0"),
- VersionUtils.getPreviousVersion(LegacyESVersion.fromString("7.3.0"))
- );
- try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) {
- NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
- assertNotNull(analyzer);
- assertAnalyzesTo(analyzer, "test", new String[] { "t" });
- }
- }
-
// Afterwards, we return ngrams of length 1 and 2, to match the default factory settings
{
try (IndexAnalyzers indexAnalyzers = buildAnalyzers(Version.CURRENT, "edge_ngram")) {
@@ -109,7 +80,7 @@ public void testPreConfiguredTokenizer() throws IOException {
{
try (
IndexAnalyzers indexAnalyzers = buildAnalyzers(
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromString("7.3.0"), Version.CURRENT),
+ VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT),
"edgeNGram"
)
) {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java
index 8094e24b9adc8..99e1c90808f41 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java
@@ -37,7 +37,6 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@@ -231,10 +230,7 @@ public void testChainedSynonymFilters() throws IOException {
public void testShingleFilters() {
Settings settings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
- )
+ .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "programmer, developer")
@@ -293,10 +289,7 @@ public void testPreconfiguredTokenFilters() throws IOException {
);
Settings settings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
- )
+ .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@@ -320,10 +313,7 @@ public void testPreconfiguredTokenFilters() throws IOException {
public void testDisallowedTokenFilters() throws IOException {
Settings settings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
- )
+ .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
index 6129971a69e18..102182f381128 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
@@ -33,7 +33,6 @@
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@@ -47,7 +46,6 @@
import org.opensearch.indices.analysis.AnalysisModule;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.IndexSettingsModule;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.io.StringReader;
@@ -202,38 +200,7 @@ public void testIgnoreKeywords() throws IOException {
}
public void testPreconfiguredFilter() throws IOException {
- // Before 7.3 we don't adjust offsets
- {
- Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
- Settings indexSettings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(
- random(),
- LegacyESVersion.V_7_0_0,
- VersionUtils.getPreviousVersion(LegacyESVersion.V_7_3_0)
- )
- )
- .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
- .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
- .build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
-
- try (
- IndexAnalyzers indexAnalyzers = new AnalysisModule(
- TestEnvironment.newEnvironment(settings),
- Collections.singletonList(new CommonAnalysisPlugin())
- ).getAnalysisRegistry().build(idxSettings)
- ) {
-
- NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
- assertNotNull(analyzer);
- assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 0 }, new int[] { 4, 4 });
-
- }
- }
- // Afger 7.3 we do adjust offsets
{
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index f78dc49e9fb8a..f3be0fe61d4be 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -39,10 +39,10 @@ opensearchplugin {
}
dependencies {
- api('com.maxmind.geoip2:geoip2:2.16.1')
+ api('com.maxmind.geoip2:geoip2:3.0.1')
// geoip2 dependencies:
api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
- api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}")
+ api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}")
api('com.maxmind.db:maxmind-db:2.0.0')
testImplementation 'org.elasticsearch:geolite2-databases:20191119'
@@ -67,24 +67,6 @@ tasks.named("bundlePlugin").configure {
}
}
-tasks.named("thirdPartyAudit").configure {
- ignoreMissingClasses(
- // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient:
- 'org.apache.http.HttpEntity',
- 'org.apache.http.HttpResponse',
- 'org.apache.http.StatusLine',
- 'org.apache.http.client.config.RequestConfig$Builder',
- 'org.apache.http.client.config.RequestConfig',
- 'org.apache.http.client.methods.CloseableHttpResponse',
- 'org.apache.http.client.methods.HttpGet',
- 'org.apache.http.client.utils.URIBuilder',
- 'org.apache.http.impl.auth.BasicScheme',
- 'org.apache.http.impl.client.CloseableHttpClient',
- 'org.apache.http.impl.client.HttpClientBuilder',
- 'org.apache.http.util.EntityUtils'
- )
-}
-
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
tasks.named("test").configure {
// Windows cannot cleanup database files properly unless it loads everything on heap.
diff --git a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1
deleted file mode 100644
index 0221476794d3a..0000000000000
--- a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c92040bd6ef2cb59be71c6749d08c141ca546caf
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1
new file mode 100644
index 0000000000000..f1d5ac5aea546
--- /dev/null
+++ b/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1
@@ -0,0 +1 @@
+8a814ae92a1d8c35f82d0ff76d86927c191b7916
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.2.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java
index 384ae6f14dc4d..030f75bf48e18 100644
--- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java
+++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java
@@ -364,7 +364,7 @@ private Map<String, Object> retrieveAsnGeoData(InetAddress ipAddress) {
})
);
- Integer asn = response.getAutonomousSystemNumber();
+ Long asn = response.getAutonomousSystemNumber();
String organization_name = response.getAutonomousSystemOrganization();
Network network = response.getNetwork();
diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java
index 6af408c185374..790a9bb4bf978 100644
--- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java
+++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java
@@ -82,7 +82,7 @@ public Map<String, Processor.Factory> getProcessors(Processor.Parameters paramet
throw new IllegalStateException("getProcessors called twice for geoip plugin!!");
}
final Path geoIpDirectory = getGeoIpDirectory(parameters);
- final Path geoIpConfigDirectory = parameters.env.configFile().resolve("ingest-geoip");
+ final Path geoIpConfigDirectory = parameters.env.configDir().resolve("ingest-geoip");
long cacheSize = CACHE_SIZE.get(parameters.env.settings());
try {
databaseReaders = loadDatabaseReaders(geoIpDirectory, geoIpConfigDirectory);
@@ -102,7 +102,7 @@ public Map<String, Processor.Factory> getProcessors(Processor.Parameters paramet
private Path getGeoIpDirectory(Processor.Parameters parameters) {
final Path geoIpDirectory;
if (parameters.env.settings().get("ingest.geoip.database_path") == null) {
- geoIpDirectory = parameters.env.modulesFile().resolve("ingest-geoip");
+ geoIpDirectory = parameters.env.modulesDir().resolve("ingest-geoip");
} else {
geoIpDirectory = PathUtils.get(parameters.env.settings().get("ingest.geoip.database_path"));
}
diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java
index f06802af8b571..34c80fec520aa 100644
--- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java
+++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java
@@ -308,7 +308,7 @@ public void testAsn() throws Exception {
 Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field");
assertThat(geoData.size(), equalTo(4));
assertThat(geoData.get("ip"), equalTo(ip));
- assertThat(geoData.get("asn"), equalTo(1136));
+ assertThat(geoData.get("asn"), equalTo(1136L));
assertThat(geoData.get("organization_name"), equalTo("KPN B.V."));
assertThat(geoData.get("network"), equalTo("82.168.0.0/14"));
}
diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java
index ee424ad1322fb..dc005ae36dff8 100644
--- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java
+++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java
@@ -62,7 +62,7 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin {
@Override
 public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
- Path userAgentConfigDirectory = parameters.env.configFile().resolve("ingest-user-agent");
+ Path userAgentConfigDirectory = parameters.env.configDir().resolve("ingest-user-agent");
if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) {
throw new IllegalStateException(
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index fb85ff4827c36..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c7317bb4e72b820a516e0c8a90beac5acc82c2e2
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..c825e197188fc
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1
@@ -0,0 +1 @@
+2711abb758d101fc738c35a6867ee7559da5308b
\ No newline at end of file
diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle
index 7f37b5e76d904..069158fb678ef 100644
--- a/modules/lang-painless/build.gradle
+++ b/modules/lang-painless/build.gradle
@@ -29,8 +29,11 @@
*/
import org.opensearch.gradle.testclusters.DefaultTestClustersTask;
+import com.github.jengelman.gradle.plugins.shadow.ShadowBasePlugin
+
apply plugin: 'opensearch.validate-rest-spec'
apply plugin: 'opensearch.yaml-rest-test'
+apply plugin: 'com.github.johnrengelman.shadow'
opensearchplugin {
description 'An easy, safe and fast scripting language for OpenSearch'
@@ -47,13 +50,38 @@ testClusters.all {
dependencies {
api 'org.antlr:antlr4-runtime:4.9.3'
api 'org.ow2.asm:asm-util:9.2'
- api 'org.ow2.asm:asm-tree:7.2'
+ api 'org.ow2.asm:asm-tree:9.2'
api 'org.ow2.asm:asm-commons:9.2'
- api 'org.ow2.asm:asm-analysis:7.2'
+ api 'org.ow2.asm:asm-analysis:9.2'
api 'org.ow2.asm:asm:9.2'
api project('spi')
}
+test {
+ doFirst {
+ test.classpath -= project.files(project.tasks.named('shadowJar'))
+ test.classpath -= project.configurations.getByName(ShadowBasePlugin.CONFIGURATION_NAME)
+ test.classpath += project.extensions.getByType(SourceSetContainer).getByName(SourceSet.MAIN_SOURCE_SET_NAME).runtimeClasspath
+ }
+}
+
+shadowJar {
+ classifier = null
+ relocate 'org.objectweb', 'org.opensearch.repackage.org.objectweb'
+ dependencies {
+ include(dependency('org.ow2.asm:asm:9.2'))
+ include(dependency('org.ow2.asm:asm-util:9.2'))
+ include(dependency('org.ow2.asm:asm-tree:9.2'))
+ include(dependency('org.ow2.asm:asm-commons:9.2'))
+ include(dependency('org.ow2.asm:asm-analysis:9.2'))
+ }
+}
+
+tasks.validateNebulaPom.dependsOn tasks.generatePomFileForShadowPublication
+tasks.validateShadowPom.dependsOn tasks.generatePomFileForNebulaPublication
+tasks.publishNebulaPublicationToMavenLocal.dependsOn tasks.generatePomFileForShadowPublication
+tasks.publishShadowPublicationToMavenLocal.dependsOn tasks.generatePomFileForNebulaPublication
+
tasks.named("dependencyLicenses").configure {
mapping from: /asm-.*/, to: 'asm'
}
@@ -127,7 +155,7 @@ configurations {
}
dependencies {
- regenerate 'org.antlr:antlr4:4.5.3'
+ regenerate 'org.antlr:antlr4:4.9.3'
}
String grammarPath = 'src/main/antlr'
diff --git a/modules/lang-painless/licenses/asm-analysis-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-7.2.jar.sha1
deleted file mode 100644
index 849b5e0bfa671..0000000000000
--- a/modules/lang-painless/licenses/asm-analysis-7.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6e6abe057f23630113f4167c34bda7086691258
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1
new file mode 100644
index 0000000000000..b93483a24da5d
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1
@@ -0,0 +1 @@
+7487dd756daf96cab9986e44b9d7bcb796a61c10
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-tree-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-tree-7.2.jar.sha1
deleted file mode 100644
index 986a1c55f5e8f..0000000000000
--- a/modules/lang-painless/licenses/asm-tree-7.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a23cc36edaf8fc5a89cb100182758ccb5991487
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1
new file mode 100644
index 0000000000000..7b486521ecef3
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1
@@ -0,0 +1 @@
+d96c99a30f5e1a19b0e609dbb19a44d8518ac01e
\ No newline at end of file
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java
index f48422d41ea9e..34fcd245289be 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java
@@ -126,7 +126,7 @@ protected List<String> getSettingAsList(String key) throws Exception {
return settings.getAsList(key);
}
};
- configuration = loader.load(environment.configFile());
+ configuration = loader.load(environment.configDir());
reload();
final FileChangesListener listener = new FileChangesListener() {
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java
index a9e1a59b7e443..6456aa0af9aac 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java
@@ -131,7 +131,8 @@ public void testRethrottleSuccessfulResponse() {
true,
false,
new TaskId("test", task.getId()),
- Collections.emptyMap()
+ Collections.emptyMap(),
+ null
)
);
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
@@ -167,7 +168,8 @@ public void testRethrottleWithSomeSucceeded() {
true,
false,
new TaskId("test", task.getId()),
- Collections.emptyMap()
+ Collections.emptyMap(),
+ null
)
);
sliceStatuses.add(new BulkByScrollTask.StatusOrException(status));
diff --git a/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1 b/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java
index 198cc11d824e7..f7899d91e0cb9 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java
@@ -176,7 +176,7 @@ public void testFailsOnUnknownNode() throws Exception {
assertThat(e.getResponse().getStatusLine().getStatusCode(), is(400));
assertThat(
e.getMessage(),
- Matchers.containsString("add voting config exclusions request for [invalid] matched no master-eligible nodes")
+ Matchers.containsString("add voting config exclusions request for [invalid] matched no cluster-manager-eligible nodes")
);
}
}
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index 2f0a6ad50e337..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77930f802430648980eded22ca6ed47fedaeaba4
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..b7733cfa9a00a
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1
@@ -0,0 +1 @@
+e9b429da553560fa0c363ffc04c774f957c56e14
\ No newline at end of file
diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java
index 757a55487a162..cd2898c9d64b4 100644
--- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java
@@ -33,7 +33,7 @@
package org.opensearch.index.analysis;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
@@ -72,7 +72,7 @@ public IcuCollationTokenFilterFactory(IndexSettings indexSettings, Environment e
if (rules != null) {
Exception failureToResolve = null;
try {
- rules = Streams.copyToString(Files.newBufferedReader(environment.configFile().resolve(rules), Charset.forName("UTF-8")));
+ rules = Streams.copyToString(Files.newBufferedReader(environment.configDir().resolve(rules), StandardCharsets.UTF_8));
} catch (IOException | SecurityException | InvalidPathException e) {
failureToResolve = e;
}
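
Note: replacing `Charset.forName("UTF-8")` with `StandardCharsets.UTF_8` trades a name-based runtime lookup for a compile-time constant that is guaranteed to exist on every JVM and cannot throw `UnsupportedCharsetException`. A minimal sketch of the pattern, assuming Java 11+ and a `Path` pointing at the rules file:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    final class RulesReader {
        // Read the collation rules as UTF-8 via the charset constant (sketch).
        static String readRules(Path rulesFile) throws IOException {
            return Files.readString(rulesFile, StandardCharsets.UTF_8);
        }
    }
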
diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuTokenizerFactory.java
index 37c60e02bb3b0..0ac9e0c106a91 100644
--- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuTokenizerFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuTokenizerFactory.java
@@ -120,7 +120,7 @@ public RuleBasedBreakIterator getBreakIterator(int script) {
// parse a single RBBi rule file
private BreakIterator parseRules(String filename, Environment env) throws IOException {
- final Path path = env.configFile().resolve(filename);
+ final Path path = env.configDir().resolve(filename);
String rules = Files.readAllLines(path).stream().filter((v) -> v.startsWith("#") == false).collect(Collectors.joining("\n"));
return new RuleBasedBreakIterator(rules.toString());
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index a0d112dd733ab..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c66f568fa9138c6ab6f3abf1efbfab3c7b5991d4
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..f5b818a206e7a
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1
@@ -0,0 +1 @@
+b247f8a877237b4663e4ab7d86fae21c68a58ea5
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index a3f939bfe9e05..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e8c47600ea859b999a5f5647341b0350b03dafcd
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..4d22255d10316
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1
@@ -0,0 +1 @@
+30e24b42fb0440911e702a531f4373bf397eb8c6
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index e2006546433fd..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6f0f5c71052beee26e4ce99e1147ce406234f417
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..a0607e6158cdd
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1
@@ -0,0 +1 @@
+18a321d93836ea2856a5302d192e9dc99c647c6e
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java
index 0ef8d22f37335..19bc27f6e616d 100644
--- a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java
+++ b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java
@@ -32,7 +32,6 @@
package org.opensearch.index.analysis;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@@ -65,10 +64,7 @@ public void testDisallowedWithSynonyms() throws IOException {
AnalysisPhoneticPlugin plugin = new AnalysisPhoneticPlugin();
Settings settings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
- )
+ .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index e675c5774f5a4..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-32aad8b8491df3c9862e7fe75e98bccdb6a25bda
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..bff959139a86c
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1
@@ -0,0 +1 @@
+41c847f39a15bb8495be8c9d8a098974be15f74b
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index 053f5c97d65dc..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ef546cfaaf727d93c4e86ddc7f77b525af135623
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..39d25d7872ea9
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1
@@ -0,0 +1 @@
+ee7995231b181aa0a01f5aef8775562e269f5ef7
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1
deleted file mode 100644
index e5a2a0b0b4ab3..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-21c3511469f67019804e41a8d83ffc5c36de6479
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1
new file mode 100644
index 0000000000000..9f07f122205d9
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1
@@ -0,0 +1 @@
+575c458431396baa7f01a546173807f27b12a087
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle
index 28cbc647ac31a..575b8858b16ba 100644
--- a/plugins/discovery-azure-classic/build.gradle
+++ b/plugins/discovery-azure-classic/build.gradle
@@ -59,7 +59,7 @@ dependencies {
api "com.sun.jersey:jersey-client:${versions.jersey}"
api "com.sun.jersey:jersey-core:${versions.jersey}"
api "com.sun.jersey:jersey-json:${versions.jersey}"
- api 'org.codehaus.jettison:jettison:1.1'
+ api 'org.codehaus.jettison:jettison:1.4.1'
api 'com.sun.xml.bind:jaxb-impl:2.2.3-1'
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
diff --git a/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index c8756c438320f..0000000000000
--- a/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
diff --git a/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.1.jar.sha1
deleted file mode 100644
index 53133f3b018e6..0000000000000
--- a/plugins/discovery-azure-classic/licenses/jettison-1.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1a01a2a1218fcf9faa2cc2a6ced025bdea687262
diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.4.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.4.1.jar.sha1
new file mode 100644
index 0000000000000..815d87d917f2e
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/jettison-1.4.1.jar.sha1
@@ -0,0 +1 @@
+8d16bbcbac93446942c9e5da04530159afbe3e65
\ No newline at end of file
diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle
index 7998e0861c7b1..0e096958538a4 100644
--- a/plugins/discovery-ec2/build.gradle
+++ b/plugins/discovery-ec2/build.gradle
@@ -50,7 +50,7 @@ dependencies {
api "commons-logging:commons-logging:${versions.commonslogging}"
api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
api "commons-codec:commons-codec:${versions.commonscodec}"
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
}
diff --git a/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index c8756c438320f..0000000000000
--- a/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
diff --git a/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.2.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle
index 2396b228d77a0..eb695f84b2bd0 100644
--- a/plugins/discovery-gce/build.gradle
+++ b/plugins/discovery-gce/build.gradle
@@ -24,7 +24,7 @@ versions << [
dependencies {
api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}"
api "com.google.api-client:google-api-client:${versions.google}"
- api "com.google.oauth-client:google-oauth-client:1.33.1"
+ api "com.google.oauth-client:google-oauth-client:1.33.2"
api "com.google.http-client:google-http-client:${versions.google}"
api "com.google.http-client:google-http-client-jackson2:${versions.google}"
api 'com.google.code.findbugs:jsr305:3.0.2'
diff --git a/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index c8756c438320f..0000000000000
--- a/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
diff --git a/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1
new file mode 100644
index 0000000000000..289e8e8261fd3
--- /dev/null
+++ b/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1
@@ -0,0 +1 @@
+2810fb515fe110295dc6867fc9f70c401b66daf3
\ No newline at end of file
diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
index 5f494147c870f..8413a750e2741 100644
--- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
+++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
@@ -96,7 +96,7 @@ public class ExampleCustomSettingsConfig {
public ExampleCustomSettingsConfig(final Environment environment) {
// Elasticsearch config directory
- final Path configDir = environment.configFile();
+ final Path configDir = environment.configDir();
// Resolve the plugin's custom settings file
final Path customSettingsYamlFile = configDir.resolve("custom-settings/custom.yml");
diff --git a/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1 b/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle
index 60fb99f459454..da644d77eb488 100644
--- a/plugins/repository-azure/build.gradle
+++ b/plugins/repository-azure/build.gradle
@@ -44,9 +44,9 @@ opensearchplugin {
}
dependencies {
- api 'com.azure:azure-core:1.26.0'
+ api 'com.azure:azure-core:1.27.0'
api 'com.azure:azure-storage-common:12.15.0'
- api 'com.azure:azure-core-http-netty:1.11.8'
+ api 'com.azure:azure-core-http-netty:1.11.9'
api "io.netty:netty-codec-dns:${versions.netty}"
api "io.netty:netty-codec-socks:${versions.netty}"
api "io.netty:netty-codec-http2:${versions.netty}"
@@ -54,15 +54,15 @@ dependencies {
api "io.netty:netty-resolver-dns:${versions.netty}"
api "io.netty:netty-transport-native-unix-common:${versions.netty}"
implementation project(':modules:transport-netty4')
- api 'com.azure:azure-storage-blob:12.14.4'
+ api 'com.azure:azure-storage-blob:12.15.0'
api 'org.reactivestreams:reactive-streams:1.0.3'
api 'io.projectreactor:reactor-core:3.4.15'
- api 'io.projectreactor.netty:reactor-netty:1.0.16'
+ api 'io.projectreactor.netty:reactor-netty:1.0.17'
api 'io.projectreactor.netty:reactor-netty-core:1.0.16'
api 'io.projectreactor.netty:reactor-netty-http:1.0.16'
api "org.slf4j:slf4j-api:${versions.slf4j}"
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${versions.jackson}"
api "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${versions.jackson}"
api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
diff --git a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1
deleted file mode 100644
index 693c6a721959c..0000000000000
--- a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-461b89dcf8948a0c4a97d4f1d876f778d0cac7aa
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1
new file mode 100644
index 0000000000000..9206b697ca648
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1
@@ -0,0 +1 @@
+75a2db538d218e2bd3c2cbdf04c955b8f6db6626
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1
deleted file mode 100644
index df7d7ae4ce285..0000000000000
--- a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0ea66d4531fb41cb3b5ab55e2e7b7f301e7f8503
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.9.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.9.jar.sha1
new file mode 100644
index 0000000000000..936a02dfba4d7
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.9.jar.sha1
@@ -0,0 +1 @@
+1d1f34b3e60db038f3913007a2706a820383dc26
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1
deleted file mode 100644
index 5333f8fa90ada..0000000000000
--- a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2b92020693d09e4980b96d278e8038a1087afea0
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1
new file mode 100644
index 0000000000000..513cb017f798d
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1
@@ -0,0 +1 @@
+a53a6bdf7564f4e3a7b0b93cd96b7f5f95c03d36
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.13.2.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.13.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/plugins/repository-azure/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1
deleted file mode 100644
index 582380e449a1d..0000000000000
--- a/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d90829f6127966b0c35c4a3e8e23ca9ed29cd8a5
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.17.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.17.jar.sha1
new file mode 100644
index 0000000000000..a1f6aa3686692
--- /dev/null
+++ b/plugins/repository-azure/licenses/reactor-netty-1.0.17.jar.sha1
@@ -0,0 +1 @@
+7720beb4f58a4379e6294d62766d2e9e1bfaf646
\ No newline at end of file
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
index c9a031451bccd..4a9aa51334d0a 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
@@ -91,8 +91,7 @@ final class AzureStorageSettings {
AZURE_CLIENT_PREFIX_KEY,
"endpoint_suffix",
key -> Setting.simpleString(key, Property.NodeScope),
- () -> ACCOUNT_SETTING,
- () -> KEY_SETTING
+ () -> ACCOUNT_SETTING
);
// The overall operation timeout
diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
index c9e6e299c7120..e8417f9ceaf2c 100644
--- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
+++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
@@ -231,6 +231,7 @@ public void testReadBlobWithRetries() throws Exception {
exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
exchange.getResponseHeaders().add("Content-Length", String.valueOf(length));
exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob");
+ exchange.getResponseHeaders().add("Content-Range", "bytes " + rangeStart + "-" + bytes.length + "/" + bytes.length);
exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length);
exchange.getResponseBody().write(bytes, rangeStart, length);
return;
@@ -247,7 +248,8 @@ public void testReadBlobWithRetries() throws Exception {
final BlobContainer blobContainer = createBlobContainer(maxRetries);
try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) {
assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream)));
- assertThat(countDownHead.isCountedDown(), is(true));
+ // No more getProperties() calls in BlobClientBase::openInputStream(), so HEAD should not be invoked
+ assertThat(countDownHead.isCountedDown(), is(false));
assertThat(countDownGet.isCountedDown(), is(true));
}
}
@@ -278,6 +280,8 @@ public void testReadRangeBlobWithRetries() throws Exception {
assertThat(length, lessThanOrEqualTo(bytes.length - rangeStart));
exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
exchange.getResponseHeaders().add("Content-Length", String.valueOf(length));
+ exchange.getResponseHeaders()
+ .add("Content-Range", "bytes " + rangeStart + "-" + rangeEnd.get() + "/" + bytes.length);
exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob");
exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length);
exchange.getResponseBody().write(bytes, rangeStart, length);
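
Note: the test fixture now emits a `Content-Range` header on ranged reads, presumably because the upgraded Azure SDK validates it when streaming; per RFC 7233 the value has the form `bytes <first>-<last>/<total>`. A hedged sketch of composing that value:

    final class HttpRanges {
        // Compose an HTTP Content-Range value, "bytes first-last/total" (RFC 7233).
        static String contentRange(long first, long last, long total) {
            return "bytes " + first + "-" + last + "/" + total;
        }
    }
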
diff --git a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
index beaa95b732d52..04ff4e8c34033 100644
--- a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
+++ b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
@@ -29,6 +29,9 @@ setup:
---
"Snapshot/Restore with repository-azure":
+ - skip:
+ features: allowed_warnings
+
# Get repository
- do:
snapshot.get_repository:
@@ -169,12 +172,16 @@ setup:
# Remove the snapshots
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
snapshot.delete:
repository: repository
snapshot: snapshot-two
master_timeout: 5m
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
snapshot.delete:
repository: repository
snapshot: snapshot-one
diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
index e1ecf3c65a0f9..0e1ed06879f91 100644
--- a/plugins/repository-gcs/build.gradle
+++ b/plugins/repository-gcs/build.gradle
@@ -61,15 +61,15 @@ dependencies {
api 'com.google.api:api-common:1.8.1'
api 'com.google.api:gax:1.54.0'
api 'org.threeten:threetenbp:1.4.4'
- api 'com.google.protobuf:protobuf-java-util:3.19.3'
+ api 'com.google.protobuf:protobuf-java-util:3.20.0'
api 'com.google.protobuf:protobuf-java:3.19.3'
api 'com.google.code.gson:gson:2.9.0'
- api 'com.google.api.grpc:proto-google-common-protos:1.16.0'
+ api 'com.google.api.grpc:proto-google-common-protos:2.8.0'
api 'com.google.api.grpc:proto-google-iam-v1:0.12.0'
api 'com.google.cloud:google-cloud-core-http:1.93.3'
api 'com.google.auth:google-auth-library-credentials:0.20.0'
api 'com.google.auth:google-auth-library-oauth2-http:0.20.0'
- api 'com.google.oauth-client:google-oauth-client:1.31.0'
+ api 'com.google.oauth-client:google-oauth-client:1.33.1'
api 'com.google.api-client:google-api-client:1.30.10'
api 'com.google.http-client:google-http-client-appengine:1.35.0'
api 'com.google.http-client:google-http-client-jackson2:1.35.0'
diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/google-oauth-client-1.31.0.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.31.0.jar.sha1
deleted file mode 100644
index 942dbb5d167a4..0000000000000
--- a/plugins/repository-gcs/licenses/google-oauth-client-1.31.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bf1cfbbaa2497d0a841ea0363df4a61170d5823b
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.33.1.jar.sha1
similarity index 100%
rename from plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1
rename to plugins/repository-gcs/licenses/google-oauth-client-1.33.1.jar.sha1
diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.16.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.16.0.jar.sha1
deleted file mode 100644
index 7762b7a3ebdc3..0000000000000
--- a/plugins/repository-gcs/licenses/proto-google-common-protos-1.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2c5f022ea3b8e8df6a619c4cd8faf9af86022daa
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-2.8.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-2.8.0.jar.sha1
new file mode 100644
index 0000000000000..3f14d9e59c9e9
--- /dev/null
+++ b/plugins/repository-gcs/licenses/proto-google-common-protos-2.8.0.jar.sha1
@@ -0,0 +1 @@
+8adcbc3c5c3b1b7af1cf1e8a25af26a516d62a4c
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1
deleted file mode 100644
index 9ba36d444c541..0000000000000
--- a/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3e6812cbbb7e6faffa7b56438740dec510e1fc1a
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1
new file mode 100644
index 0000000000000..1e9d00d8d5c03
--- /dev/null
+++ b/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1
@@ -0,0 +1 @@
+ee4496b296418283cbe7ae784984347fc4717a9a
\ No newline at end of file
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index dc1f55b686044..d8811ded8d092 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -61,13 +61,13 @@ dependencies {
api "org.apache.hadoop:hadoop-client-api:${versions.hadoop3}"
runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop3}"
api "org.apache.hadoop:hadoop-hdfs:${versions.hadoop3}"
- api 'org.apache.htrace:htrace-core4:4.1.0-incubating'
+ api 'org.apache.htrace:htrace-core4:4.2.0-incubating'
api "org.apache.logging.log4j:log4j-core:${versions.log4j}"
api 'org.apache.avro:avro:1.10.2'
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api 'com.google.code.gson:gson:2.9.0'
runtimeOnly 'com.google.guava:guava:30.1.1-jre'
- api 'com.google.protobuf:protobuf-java:3.19.3'
+ api 'com.google.protobuf:protobuf-java:3.20.0'
api "commons-logging:commons-logging:${versions.commonslogging}"
api 'commons-cli:commons-cli:1.2'
api "commons-codec:commons-codec:${versions.commonscodec}"
@@ -80,7 +80,7 @@ dependencies {
api 'javax.servlet:servlet-api:2.5'
api "org.slf4j:slf4j-api:${versions.slf4j}"
api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"
- api 'net.minidev:json-smart:2.4.7'
+ api 'net.minidev:json-smart:2.4.8'
api 'org.apache.zookeeper:zookeeper:3.7.0'
api "io.netty:netty-all:${versions.netty}"
implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8'
diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/htrace-core4-4.1.0-incubating.jar.sha1 b/plugins/repository-hdfs/licenses/htrace-core4-4.1.0-incubating.jar.sha1
deleted file mode 100644
index 806c624c02cf0..0000000000000
--- a/plugins/repository-hdfs/licenses/htrace-core4-4.1.0-incubating.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-12b3e2adda95e8c41d9d45d33db075137871d2e2
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/htrace-core4-4.2.0-incubating.jar.sha1 b/plugins/repository-hdfs/licenses/htrace-core4-4.2.0-incubating.jar.sha1
new file mode 100644
index 0000000000000..e2eafb09dba00
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/htrace-core4-4.2.0-incubating.jar.sha1
@@ -0,0 +1 @@
+94b3f1966922bc45d0f8a86a2aa867a4b0df288b
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.2.jar.sha1 b/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.jar.sha1 b/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/plugins/repository-hdfs/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/json-smart-2.4.7.jar.sha1 b/plugins/repository-hdfs/licenses/json-smart-2.4.7.jar.sha1
deleted file mode 100644
index 16f9a4431485a..0000000000000
--- a/plugins/repository-hdfs/licenses/json-smart-2.4.7.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8d7f4c1530c07c54930935f3da85f48b83b3c109
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/json-smart-2.4.8.jar.sha1 b/plugins/repository-hdfs/licenses/json-smart-2.4.8.jar.sha1
new file mode 100644
index 0000000000000..7a3710cf1b364
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/json-smart-2.4.8.jar.sha1
@@ -0,0 +1 @@
+7c62f5f72ab05eb54d40e2abf0360a2fe9ea477f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1
deleted file mode 100644
index 655ecd1f1c1c9..0000000000000
--- a/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4b57f1b1b9e281231c3fcfc039ce3021e29ff570
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1
new file mode 100644
index 0000000000000..c5b0169ce0dba
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1
@@ -0,0 +1 @@
+3c72ddaaab7ffafe789e4f732c1fd614eb798bf4
\ No newline at end of file
diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
index 9078e2b76cc6d..03abb94e1263c 100644
--- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
+++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
@@ -102,7 +102,7 @@ class HdfsSecurityContext {
* Expects keytab file to exist at {@code $CONFIG_DIR$/repository-hdfs/krb5.keytab}
*/
static Path locateKeytabFile(Environment environment) {
- Path keytabPath = environment.configFile().resolve("repository-hdfs").resolve("krb5.keytab");
+ Path keytabPath = environment.configDir().resolve("repository-hdfs").resolve("krb5.keytab");
try {
if (Files.exists(keytabPath) == false) {
throw new RuntimeException("Could not locate keytab at [" + keytabPath + "].");
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index c5939958c816a..33448b0039ce2 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -58,7 +58,7 @@ dependencies {
api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
api "commons-codec:commons-codec:${versions.commonscodec}"
api "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
api "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
api "joda-time:joda-time:${versions.joda}"
@@ -190,7 +190,7 @@ internalClusterTest {
}
yamlRestTest {
- systemProperty 'tests.rest.blacklist', (
+ systemProperty 'tests.rest.denylist', (
useFixture ?
['repository_s3/50_repository_ecs_credentials/*']
:
@@ -246,7 +246,7 @@ if (useFixture) {
setClasspath(yamlRestTestSourceSet.getRuntimeClasspath())
// Minio only supports a single access key, see https://github.com/minio/minio/pull/5968
- systemProperty 'tests.rest.blacklist', [
+ systemProperty 'tests.rest.denylist', [
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
@@ -272,7 +272,7 @@ if (useFixture) {
SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME)
setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs())
setClasspath(yamlRestTestSourceSet.getRuntimeClasspath())
- systemProperty 'tests.rest.blacklist', [
+ systemProperty 'tests.rest.denylist', [
'repository_s3/10_basic/*',
'repository_s3/20_repository_permanent_credentials/*',
'repository_s3/30_repository_temporary_credentials/*',
diff --git a/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index c8756c438320f..0000000000000
--- a/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
diff --git a/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.13.2.2.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.13.2.2.jar.sha1
new file mode 100644
index 0000000000000..9d9266300feef
--- /dev/null
+++ b/plugins/repository-s3/licenses/jackson-databind-2.13.2.2.jar.sha1
@@ -0,0 +1 @@
+ffeb635597d093509f33e1e94274d14be610f933
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.13.2.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.13.2.jar.sha1
deleted file mode 100644
index 5d356f3fd045f..0000000000000
--- a/plugins/repository-s3/licenses/jackson-databind-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-926e48c451166a291f1ce6c6276d9abbefa7c00f
\ No newline at end of file
diff --git a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/EvilSecurityTests.java
index 6f9368fa767b0..3dcc547fa002b 100644
--- a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/EvilSecurityTests.java
+++ b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/EvilSecurityTests.java
@@ -125,23 +125,23 @@ public void testEnvironmentPaths() throws Exception {
// check that all directories got permissions:
// bin file: ro
- assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions);
+ assertExactPermissions(new FilePermission(environment.binDir().toString(), "read,readlink"), permissions);
// lib file: ro
- assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions);
+ assertExactPermissions(new FilePermission(environment.libDir().toString(), "read,readlink"), permissions);
// modules file: ro
- assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions);
+ assertExactPermissions(new FilePermission(environment.modulesDir().toString(), "read,readlink"), permissions);
// config file: ro
- assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions);
+ assertExactPermissions(new FilePermission(environment.configDir().toString(), "read,readlink"), permissions);
// plugins: ro
- assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions);
+ assertExactPermissions(new FilePermission(environment.pluginsDir().toString(), "read,readlink"), permissions);
// data paths: r/w
for (Path dataPath : environment.dataFiles()) {
assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
}
- assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions);
+ assertExactPermissions(new FilePermission(environment.sharedDataDir().toString(), "read,readlink,write,delete"), permissions);
// logs: r/w
- assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions);
+ assertExactPermissions(new FilePermission(environment.logsDir().toString(), "read,readlink,write,delete"), permissions);
// temp dir: r/w
assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions);
// PID file: delete only (for the shutdown hook)
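
Note: this test exercises the full sweep of the `Environment` accessor renames. For reference, the mapping covered above (all return `java.nio.file.Path`):

    // Old name                     -> New name
    // environment.binFile()        -> environment.binDir()
    // environment.libFile()        -> environment.libDir()
    // environment.modulesFile()    -> environment.modulesDir()
    // environment.configFile()     -> environment.configDir()
    // environment.pluginsFile()    -> environment.pluginsDir()
    // environment.sharedDataFile() -> environment.sharedDataDir()
    // environment.logsFile()       -> environment.logsDir()
    // environment.dataFiles() is unchanged in this diff.
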
diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java
index e0246870181c0..f85a94cc9f556 100644
--- a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java
+++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java
@@ -47,8 +47,13 @@ public void testOpensearchException() throws Exception {
private void logClusterNodes() throws IOException {
ObjectPath objectPath = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "_nodes")));
Map<String, Object> nodes = objectPath.evaluate("nodes");
- String master = EntityUtils.toString(client().performRequest(new Request("GET", "_cat/master?h=id")).getEntity()).trim();
- logger.info("cluster discovered: master id='{}'", master);
+ // As of 2.0, 'GET _cat/master' API is deprecated to promote inclusive language.
+ // Allow the deprecation warning for the node running an older version.
+ // TODO: Replace the API with 'GET _cat/cluster_manager' when dropping compatibility with 1.x versions.
+ Request catRequest = new Request("GET", "_cat/master?h=id");
+ catRequest.setOptions(expectWarningsOnce("[GET /_cat/master] is deprecated! Use [GET /_cat/cluster_manager] instead."));
+ String clusterManager = EntityUtils.toString(client().performRequest(catRequest).getEntity()).trim();
+ logger.info("cluster discovered: cluster-manager id='{}'", clusterManager);
for (String id : nodes.keySet()) {
logger.info("{}: id='{}', name='{}', version={}",
objectPath.evaluate("nodes." + id + ".http.publish_address"),
diff --git a/qa/no-bootstrap-tests/src/test/java/org/opensearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/opensearch/bootstrap/SpawnerNoBootstrapTests.java
index 949369899dc82..c3c332aecfd4c 100644
--- a/qa/no-bootstrap-tests/src/test/java/org/opensearch/bootstrap/SpawnerNoBootstrapTests.java
+++ b/qa/no-bootstrap-tests/src/test/java/org/opensearch/bootstrap/SpawnerNoBootstrapTests.java
@@ -90,8 +90,8 @@ public void testNoControllerSpawn() throws IOException {
Environment environment = TestEnvironment.newEnvironment(settings);
// This plugin will NOT have a controller daemon
- Path plugin = environment.modulesFile().resolve("a_plugin");
- Files.createDirectories(environment.modulesFile());
+ Path plugin = environment.modulesDir().resolve("a_plugin");
+ Files.createDirectories(environment.modulesDir());
Files.createDirectories(plugin);
PluginTestUtil.writePluginProperties(
plugin,
@@ -113,8 +113,8 @@ public void testNoControllerSpawn() throws IOException {
* Two plugins - one with a controller daemon and one without.
*/
public void testControllerSpawn() throws Exception {
- assertControllerSpawns(Environment::pluginsFile, false);
- assertControllerSpawns(Environment::modulesFile, true);
+ assertControllerSpawns(Environment::pluginsDir, false);
+ assertControllerSpawns(Environment::modulesDir, true);
}
private void assertControllerSpawns(final Function<Environment, Path> pluginsDirFinder, boolean expectSpawn) throws Exception {
@@ -133,8 +133,8 @@ private void assertControllerSpawns(final Function<Environment, Path> pluginsDir
// this plugin will have a controller daemon
Path plugin = pluginsDirFinder.apply(environment).resolve("test_plugin");
- Files.createDirectories(environment.modulesFile());
- Files.createDirectories(environment.pluginsFile());
+ Files.createDirectories(environment.modulesDir());
+ Files.createDirectories(environment.pluginsDir());
Files.createDirectories(plugin);
PluginTestUtil.writePluginProperties(
plugin,
@@ -192,7 +192,7 @@ public void testControllerSpawnWithIncorrectDescriptor() throws IOException {
Environment environment = TestEnvironment.newEnvironment(settings);
- Path plugin = environment.modulesFile().resolve("test_plugin");
+ Path plugin = environment.modulesDir().resolve("test_plugin");
Files.createDirectories(plugin);
PluginTestUtil.writePluginProperties(
plugin,
diff --git a/qa/os/build.gradle b/qa/os/build.gradle
index 038e3d16745c3..92c5e4f154ad8 100644
--- a/qa/os/build.gradle
+++ b/qa/os/build.gradle
@@ -50,7 +50,7 @@ dependencies {
testImplementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testImplementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
- testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
}
tasks.named('forbiddenApisTest').configure {
diff --git a/qa/smoke-test-http/src/test/java/org/opensearch/http/DanglingIndicesRestIT.java b/qa/smoke-test-http/src/test/java/org/opensearch/http/DanglingIndicesRestIT.java
index d5dcde2492046..3b32ac40917e4 100644
--- a/qa/smoke-test-http/src/test/java/org/opensearch/http/DanglingIndicesRestIT.java
+++ b/qa/smoke-test-http/src/test/java/org/opensearch/http/DanglingIndicesRestIT.java
@@ -135,7 +135,7 @@ public void testDanglingIndicesCanBeImported() throws Exception {
importRequest.addParameter("accept_data_loss", "true");
// Ensure this parameter is accepted
importRequest.addParameter("timeout", "20s");
- importRequest.addParameter("master_timeout", "20s");
+ importRequest.addParameter("cluster_manager_timeout", "20s");
final Response importResponse = restClient.performRequest(importRequest);
assertThat(importResponse.getStatusLine().getStatusCode(), equalTo(ACCEPTED.getStatus()));
@@ -171,7 +171,7 @@ public void testDanglingIndicesCanBeDeleted() throws Exception {
deleteRequest.addParameter("accept_data_loss", "true");
// Ensure these parameters are accepted
deleteRequest.addParameter("timeout", "20s");
- deleteRequest.addParameter("master_timeout", "20s");
+ deleteRequest.addParameter("cluster_manager_timeout", "20s");
final Response deleteResponse = restClient.performRequest(deleteRequest);
assertThat(deleteResponse.getStatusLine().getStatusCode(), equalTo(ACCEPTED.getStatus()));
diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle
index 7cb08a9de6f08..0cf2098c24b91 100644
--- a/qa/wildfly/build.gradle
+++ b/qa/wildfly/build.gradle
@@ -39,9 +39,9 @@ apply plugin: 'opensearch.internal-distribution-download'
testFixtures.useFixture()
dependencies {
- providedCompile 'javax.enterprise:cdi-api:1.2'
- providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final'
- providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.0.Final'
+ providedCompile 'javax.enterprise:cdi-api:2.0'
+ providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.2.Final'
+ providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.1.Final'
api('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') {
exclude module: 'jackson-annotations'
exclude module: 'jackson-core'
@@ -50,7 +50,7 @@ dependencies {
}
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
api "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}"
api "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:${versions.jackson}"
api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json
index f7c0d69805caf..bb066cd131480 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json
@@ -26,23 +26,6 @@
"description":"Default index for items which don't provide one"
}
}
- },
- {
- "path":"/{index}/{type}/_bulk",
- "methods":[
- "POST",
- "PUT"
- ],
- "parts":{
- "index":{
- "type":"string",
- "description":"Default index for items which don't provide one"
- },
- "type":{
- "type":"string",
- "description":"Default document type for items which don't provide one"
- }
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json
index 7b3dc70b03c38..717c1c49808f6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json
@@ -55,7 +55,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.cluster_manager.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.cluster_manager.json
index c1084825546bf..cd96038ad0693 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.cluster_manager.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.cluster_manager.json
@@ -36,7 +36,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json
index a92189134f88f..2491ab309531d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json
@@ -55,7 +55,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json
index e688e23cab089..c8afa4cb17039 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json
@@ -26,7 +26,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json
index 36fa33be495cd..9c0edf8c53d90 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json
@@ -26,7 +26,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json
index d5346c6d9e7b4..0b5b39b01ee58 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json
@@ -26,7 +26,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json
index 84d9965907ff3..58960709a99bb 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.repositories.json
@@ -27,7 +27,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
index 472ef7fd22eee..5107353c7b14f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
@@ -49,6 +49,18 @@
"pb"
]
},
+ "master_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
+ },
"h":{
"type":"list",
"description":"Comma-separated list of column names to display"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
index a13c0f6bf6d4a..fab381a098e3f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
@@ -55,7 +55,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json
index 757c2cfbe7dc6..1320207abfe75 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.snapshots.json
@@ -39,7 +39,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json
index 53fc872b5dae2..d45593b7bb2c8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.templates.json
@@ -38,7 +38,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json
index 710c297dbbe75..1165703490d1a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json
@@ -54,7 +54,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"h":{
"type":"list",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json
index 9beea52c86b37..43e14ad0e2dd8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.delete_component_template.json
@@ -28,7 +28,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
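
Because the old name is deprecated rather than removed, requests that still send `master_timeout` keep working but emit a deprecation warning. A hedged sketch of a YAML REST test that tolerates the warning (the template name is hypothetical, and `ignore: 404` just makes the sketch self-contained):

```yaml
---
"master_timeout still accepted on component template delete":
  - skip:
      features: allowed_warnings
  - do:
      ignore: 404
      allowed_warnings:
        - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
      cluster.delete_component_template:
        name: test_component_template   # hypothetical template name
        master_timeout: 1m
```

The `skip`/`features` stanza mirrors the pattern the test updates later in this change rely on.
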
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json
index ecf32f50c0a6c..aa4e395672ef3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json
@@ -30,7 +30,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"local":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json
index 6f91fbbedf5de..c60230dbc43b3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json
@@ -22,7 +22,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json
index 894b141f2f3b3..b3fc958891dfe 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json
@@ -56,7 +56,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json
index d940adf9aef5d..22cfbac7ff447 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.pending_tasks.json
@@ -22,7 +22,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json
index abc83fb15f48a..05558bc7bfc50 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_component_template.json
@@ -34,7 +34,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json
index f6b9a0863380e..1e36acc51544d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.put_settings.json
@@ -22,7 +22,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json
index bcf2704110664..285da40dd0245 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json
@@ -44,7 +44,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json
index c17e5b073e361..b43ab901785bd 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json
@@ -71,7 +71,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"flat_settings":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json
index 171f3da44d36d..767af84b82258 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json
@@ -23,32 +23,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}/_create",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"Document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
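
With the typed `/{index}/{type}/{id}/_create` path removed, only the typeless endpoint remains. An illustrative call (index, id, and body are made up for the example); the same removal is applied below to `delete`, `exists_source`, `explain`, and `update`:

```yaml
- do:
    create:
      index: test_index   # hypothetical
      id:    "1"
      body:  { message: "hello" }
```
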
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json
index 1e3d74784591b..5d832fc794f4f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.delete_dangling_index.json
@@ -30,9 +30,17 @@
"type": "time",
"description": "Explicit operation timeout"
},
- "master_timeout": {
- "type": "time",
- "description": "Specify timeout for connection to master"
+ "master_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json
index e9dce01a76727..5b056e1fa145f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/dangling_indices.import_dangling_index.json
@@ -30,9 +30,17 @@
"type": "time",
"description": "Explicit operation timeout"
},
- "master_timeout": {
- "type": "time",
- "description": "Specify timeout for connection to master"
+ "master_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json
index 0d82bca9d4173..76dceb455627f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}",
- "methods":[
- "DELETE"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json
index 143ee406025ce..bdbf818fb5d81 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}/_source",
- "methods":[
- "HEAD"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document; deprecated and optional starting with 7.0",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
index c7c393a6a1cba..7f630f7666f30 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json
@@ -23,32 +23,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}/_explain",
- "methods":[
- "GET",
- "POST"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json
index 7389fb1322824..af10b9f50091f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.add_block.json
@@ -32,7 +32,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json
index d3a249583bd84..b55d43371005f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clone.json
@@ -31,9 +31,17 @@
"type" : "time",
"description" : "Explicit operation timeout"
},
- "master_timeout": {
- "type" : "time",
- "description" : "Specify timeout for connection to master"
+ "master_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"wait_for_active_shards": {
"type" : "string",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json
index f26c8e77a06a6..1182b73541f93 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json
@@ -28,7 +28,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json
index 922183d628ac6..53ea4cbd80803 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json
@@ -32,7 +32,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json
index 13abf70ca739b..049a397c6b3e2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_alias.json
@@ -48,7 +48,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json
index d037b03dc5277..c74771ffe4b81 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_index_template.json
@@ -28,7 +28,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json
index ca484a73e99f9..74dbb1822b64a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete_template.json
@@ -28,7 +28,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json
deleted file mode 100644
index c854d0e8fd841..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json
+++ /dev/null
@@ -1,55 +0,0 @@
-{
- "indices.exists_type":{
- "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-types-exists.html",
- "description":"Returns information about whether a particular document type exists. (DEPRECATED)"
- },
- "stability":"stable",
- "url":{
- "paths":[
- {
- "path":"/{index}/_mapping/{type}",
- "methods":[
- "HEAD"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names; use `_all` to check the types across all indices"
- },
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types to check"
- }
- }
- }
- ]
- },
- "params":{
- "ignore_unavailable":{
- "type":"boolean",
- "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)"
- },
- "allow_no_indices":{
- "type":"boolean",
- "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
- },
- "expand_wildcards":{
- "type":"enum",
- "options":[
- "open",
- "closed",
- "hidden",
- "none",
- "all"
- ],
- "default":"open",
- "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both."
- },
- "local":{
- "type":"boolean",
- "description":"Return local information, do not retrieve the state from master node (default: false)"
- }
- }
- }
-}
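
Since `indices.exists_type` is deleted outright rather than deprecated, existence checks have to target the index (or its mapping) instead of a document type. A rough equivalent under that assumption:

```yaml
- do:
    indices.exists:
      index: test_index   # hypothetical index name
```
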
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json
index 90a1274ecb059..0a43f6481d86d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json
@@ -57,7 +57,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json
index 7ea6dd2944c79..fbd03f99d2547 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_index_template.json
@@ -34,7 +34,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"local":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
index 24fd668069697..321bfaba4f941 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
@@ -50,7 +50,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"local":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
index 68e325446d3dc..1bdaea01f87bf 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
@@ -58,7 +58,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json
index 337016763ad0a..52aeb17913db4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json
@@ -34,7 +34,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"local":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json
index 603f24b665eb7..00767afbaec04 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_alias.json
@@ -50,7 +50,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json
index 3f758e18737e2..a2ceb259a4376 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_index_template.json
@@ -35,7 +35,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
index 451cbccd8d329..c8b63d4e1cee1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
@@ -29,7 +29,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json
index 66fe23bab8ba2..ca245ec543da3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json
@@ -30,7 +30,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json
index 75a328af929ef..3b1c230178bb8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json
@@ -34,7 +34,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json
index fef1f03d1c9a7..303b7c7b03c19 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json
@@ -48,7 +48,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"wait_for_active_shards":{
"type":"string",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json
index fd6d705d6a5fa..6bb09ee0019e1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json
@@ -35,9 +35,17 @@
"type" : "time",
"description" : "Explicit operation timeout"
},
- "master_timeout": {
- "type" : "time",
- "description" : "Specify timeout for connection to master"
+ "master_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"wait_for_active_shards": {
"type" : "string",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json
index 2b81572f0aaaf..0e42ba6028a9f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_index_template.json
@@ -34,7 +34,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json
index 364547dd318a2..65b555082c3b1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.simulate_template.json
@@ -40,7 +40,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json
index 02df3cdedf01f..d1b5a28c9ff0f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.split.json
@@ -35,9 +35,17 @@
"type" : "time",
"description" : "Explicit operation timeout"
},
- "master_timeout": {
- "type" : "time",
- "description" : "Specify timeout for connection to master"
+ "master_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
},
"wait_for_active_shards": {
"type" : "string",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json
index 0a8960f2f9e89..382bb9efde0ff 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json
@@ -118,10 +118,6 @@
],
"default":"indices"
},
- "types":{
- "type":"list",
- "description":"A comma-separated list of document types for the `indexing` index metric"
- },
"include_segment_file_sizes":{
"type":"boolean",
"description":"Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested)",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json
index d4a222f2061c8..c31cb8fe59c0f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.update_aliases.json
@@ -22,7 +22,15 @@
},
"master_timeout":{
"type":"time",
- "description":"Specify timeout for connection to master"
+ "description":"Specify timeout for connection to master",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To promote inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Specify timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json
index 3becec003a9e6..cc0386ee3b972 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json
@@ -26,28 +26,6 @@
"description":"A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices"
}
}
- },
- {
- "path":"/{index}/{type}/_validate/query",
- "methods":[
- "GET",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices"
- },
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types to restrict the operation; leave empty to perform the operation on all types",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
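
Likewise, `_validate/query` is now addressable only per index, never per type. An illustrative typeless call:

```yaml
- do:
    indices.validate_query:
      index: test_index   # hypothetical
      body:
        query:
          match_all: {}
```
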
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json
index 727fe79176797..05eb3309b11e6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.cleanup_repository.json
@@ -24,7 +24,15 @@
"params": {
"master_timeout": {
"type" : "time",
- "description" : "Explicit operation timeout for connection to master node"
+ "description" : "Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout": {
"type" : "time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json
index 18122bc209b0e..c79460fc30a48 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.clone.json
@@ -32,7 +32,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
}
},
"body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json
index da8cb9916f584..64aaeaef9d897 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create.json
@@ -29,7 +29,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"wait_for_completion":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json
index 431ac3c68c0bd..4965162bcd86c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.create_repository.json
@@ -25,7 +25,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json
index 30053cd5b94d3..2e21a08219942 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete.json
@@ -28,7 +28,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
}
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json
index b60aeba83a329..3fc22f969784c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.delete_repository.json
@@ -24,7 +24,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json
index 20006f6f499b6..e084a997a61b1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get.json
@@ -28,7 +28,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json
index 8c91caa4fe81f..cf03bab18c03f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.get_repository.json
@@ -30,7 +30,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"local":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json
index 697ea395dcc2b..87ab8117ec489 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.restore.json
@@ -28,7 +28,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"wait_for_completion":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json
index 70a7ba23ef506..4f22c24fd9a56 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.status.json
@@ -46,7 +46,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"ignore_unavailable":{
"type":"boolean",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json
index de638c19d4a0b..865eb15d11310 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.verify_repository.json
@@ -24,7 +24,15 @@
"params":{
"master_timeout":{
"type":"time",
- "description":"Explicit operation timeout for connection to master node"
+ "description":"Explicit operation timeout for connection to master node",
+ "deprecated":{
+ "version":"2.0.0",
+ "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ }
+ },
+ "cluster_manager_timeout":{
+ "type":"time",
+ "description":"Explicit operation timeout for connection to cluster-manager node"
},
"timeout":{
"type":"time",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json
index 81bc101600aeb..c8d1ed435756b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}/_update",
- "methods":[
- "POST"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"Document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
index 3d20f1d0f7e52..b2c1e1e561933 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
@@ -169,7 +169,8 @@ setup:
       cluster.state:
         metric: [ master_node, version ]
       allowed_warnings:
         - 'Deprecated value [master_node] used for parameter [metric]. To promote inclusive language, please use [cluster_manager_node] instead. It will be unsupported in a future major version.'
+        - 'Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.'
- match: { cluster_uuid: $cluster_uuid }
- is_true: master_node
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
index a4d1841ed7108..ca8342b2e91c2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
@@ -31,6 +31,7 @@ setup:
- skip:
version: " - 7.3.99"
reason: index cloning was added in 7.4.0
+ features: allowed_warnings
# make it read-only
- do:
indices.put_settings:
@@ -46,6 +47,8 @@ setup:
# now we do the actual clone
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.clone:
index: "source"
target: "target"
@@ -94,9 +97,12 @@ setup:
- skip:
version: " - 7.3.99"
reason: index cloning was added in 7.4.0
+ features: allowed_warnings
# try to do an illegal clone with illegal number_of_shards
- do:
catch: /illegal_argument_exception/
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.clone:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/20_source_mapping.yml
index 625f574fa73de..21c476c76965c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/20_source_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/20_source_mapping.yml
@@ -3,6 +3,7 @@
- skip:
version: " - 7.3.99"
reason: index cloning was added in 7.4.0
+ features: allowed_warnings
# create index
- do:
indices.create:
@@ -50,6 +51,8 @@
# now we do the actual clone
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.clone:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/30_copy_settings.yml
index 503cc15609072..b0bd8056cb004 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/30_copy_settings.yml
@@ -3,7 +3,7 @@
- skip:
version: " - 7.3.99"
reason: index cloning was added in 7.4.0
- features: [arbitrary_key]
+ features: [arbitrary_key, allowed_warnings]
- do:
nodes.info:
@@ -36,6 +36,8 @@
# now we do an actual clone and copy settings
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.clone:
index: "source"
target: "copy-settings-target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml
index 67592a013e8f1..c7892f58a6f59 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml
@@ -6,6 +6,8 @@ setup:
ignore: [404]
---
"Test indices.exists_template":
+ - skip:
+ features: allowed_warnings
- do:
indices.exists_template:
@@ -23,6 +25,8 @@ setup:
number_of_replicas: 0
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.exists_template:
name: test
master_timeout: 1m
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
index 9becbd54a3773..32536f8f72650 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
@@ -72,8 +72,12 @@ setup:
---
"Get template with flat settings and master timeout":
+ - skip:
+ features: allowed_warnings
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.get_template:
name: test
flat_settings: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
index a5b1cb8607b3a..032f061d8a160 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
@@ -7,6 +7,9 @@
# which node is the one with the highest version and that is the only one that can safely
# be used to shrink the index.
+ - skip:
+ features: allowed_warnings
+
- do:
nodes.info:
node_id: data:true
@@ -53,6 +56,8 @@
# now we do the actual shrink
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.shrink:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
index dec0760fc6b19..8d08373208216 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
@@ -3,7 +3,7 @@
- skip:
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
- features: [warnings, arbitrary_key]
+ features: [warnings, arbitrary_key, allowed_warnings]
- do:
nodes.info:
@@ -60,6 +60,8 @@
# now we do the actual shrink
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.shrink:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
index a744895c4ce38..33bcb18f8afb6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
@@ -47,6 +47,7 @@
index.merge.scheduler.max_thread_count: 2
allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
- do:
cluster.health:
@@ -64,6 +65,8 @@
# now we do an actual shrink and copy settings (by default)
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.shrink:
index: "source"
target: "default-copy-settings-target"
@@ -91,6 +94,8 @@
# now we do an actual shrink and try to set no copy settings
- do:
catch: /illegal_argument_exception/
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.shrink:
index: "source"
target: "explicit-no-copy-settings-target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
index 4ae1d0002a237..2432f47d4dca7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
@@ -29,6 +29,9 @@ setup:
---
"Split index via API":
+ - skip:
+ features: allowed_warnings
+
# make it read-only
- do:
indices.put_settings:
@@ -44,6 +47,8 @@ setup:
# now we do the actual split
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "target"
@@ -90,6 +95,8 @@ setup:
---
"Split from 1 to N":
+ - skip:
+ features: allowed_warnings
- do:
indices.create:
index: source_one_shard
@@ -131,6 +138,8 @@ setup:
# now we do the actual split from 1 to 5
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source_one_shard"
target: "target"
@@ -176,9 +185,14 @@ setup:
---
"Create illegal split indices":
+ - skip:
+ features: allowed_warnings
+
# try to do an illegal split with number_of_routing_shards set
- do:
catch: /illegal_argument_exception/
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "target"
@@ -193,6 +207,8 @@ setup:
# try to do an illegal split with illegal number_of_shards
- do:
catch: /illegal_state_exception/
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
index c86e49aac0561..69061c6d0fbd4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
@@ -1,5 +1,8 @@
---
"Split index ignores target template mapping":
+ - skip:
+ features: allowed_warnings
+
# create index
- do:
indices.create:
@@ -48,6 +51,8 @@
# now we do the actual split
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
index 0ceacf1f064ca..5b7f8f9960774 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
@@ -49,7 +49,7 @@
index.merge.scheduler.max_thread_count: 2
allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
-
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
- do:
cluster.health:
@@ -67,6 +67,8 @@
# now we do an actual split and copy settings (by default)
- do:
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "default-copy-settings-target"
@@ -94,6 +96,8 @@
- do:
catch: /illegal_argument_exception/
+ allowed_warnings:
+ - "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead."
indices.split:
index: "source"
target: "explicit-no-copy-settings-target"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
index d0385ac0125f4..fd6bb7f96eb9d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
@@ -32,4 +32,3 @@
- is_true: tasks
- match: { tasks.0.headers.X-Opaque-Id: "That is me" }
-
diff --git a/sandbox/plugins/concurrent-search/build.gradle b/sandbox/plugins/concurrent-search/build.gradle
new file mode 100644
index 0000000000000..acc3cb5092cd8
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/build.gradle
@@ -0,0 +1,42 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+apply plugin: 'opensearch.opensearchplugin'
+apply plugin: 'opensearch.yaml-rest-test'
+
+opensearchplugin {
+ name 'concurrent-search'
+ description 'The experimental plugin which implements concurrent search over Apache Lucene segments'
+ classname 'org.opensearch.search.ConcurrentSegmentSearchPlugin'
+ licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
+ noticeFile rootProject.file('NOTICE.txt')
+}
+
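+// The sandbox plugin bundles no YAML REST test suites yet, so the test task and the naming-convention check are disabled.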
+yamlRestTest.enabled = false;
+testingConventions.enabled = false;
\ No newline at end of file
diff --git a/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/ConcurrentSegmentSearchPlugin.java b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/ConcurrentSegmentSearchPlugin.java
new file mode 100644
index 0000000000000..da999e40f0f07
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/ConcurrentSegmentSearchPlugin.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.concurrent.OpenSearchExecutors;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.plugins.SearchPlugin;
+import org.opensearch.search.query.ConcurrentQueryPhaseSearcher;
+import org.opensearch.search.query.QueryPhaseSearcher;
+import org.opensearch.threadpool.ExecutorBuilder;
+import org.opensearch.threadpool.FixedExecutorBuilder;
+import org.opensearch.threadpool.ThreadPool;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * The experimental plugin which implements concurrent search over Apache Lucene segments.
+ */
+public class ConcurrentSegmentSearchPlugin extends Plugin implements SearchPlugin {
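+    /** Name of the dedicated thread pool the plugin registers for searching Lucene segments concurrently. */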
+ private static final String INDEX_SEARCHER = "index_searcher";
+
+ /**
+ * Default constructor
+ */
+ public ConcurrentSegmentSearchPlugin() {}
+
+ @Override
+    public Optional<QueryPhaseSearcher> getQueryPhaseSearcher() {
+ return Optional.of(new ConcurrentQueryPhaseSearcher());
+ }
+
+ @Override
+    public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
+ final int allocatedProcessors = OpenSearchExecutors.allocatedProcessors(settings);
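+        // Size the fixed index_searcher pool to the allocated processors, with a bounded queue of 1000 pending tasks.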
+ return Collections.singletonList(
+ new FixedExecutorBuilder(settings, INDEX_SEARCHER, allocatedProcessors, 1000, "thread_pool." + INDEX_SEARCHER)
+ );
+ }
+
+ @Override
+    public Optional<ExecutorServiceProvider> getIndexSearcherExecutorProvider() {
+ return Optional.of((ThreadPool threadPool) -> threadPool.executor(INDEX_SEARCHER));
+ }
+}
diff --git a/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/package-info.java b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/package-info.java
new file mode 100644
index 0000000000000..041f914fab7d7
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * The implementation of the experimental plugin for concurrent search over Apache Lucene segments.
+ */
+package org.opensearch.search;
diff --git a/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/ConcurrentQueryPhaseSearcher.java b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/ConcurrentQueryPhaseSearcher.java
new file mode 100644
index 0000000000000..65f339838a40b
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/ConcurrentQueryPhaseSearcher.java
@@ -0,0 +1,119 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import static org.opensearch.search.query.TopDocsCollectorContext.createTopDocsCollectorContext;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+import org.apache.lucene.search.Query;
+import org.opensearch.search.internal.ContextIndexSearcher;
+import org.opensearch.search.internal.SearchContext;
+import org.opensearch.search.profile.query.ProfileCollectorManager;
+import org.opensearch.search.query.QueryPhase.DefaultQueryPhaseSearcher;
+import org.opensearch.search.query.QueryPhase.TimeExceededException;
+
+/**
+ * The implementation of the {@link QueryPhaseSearcher} which attempts to use concurrent
+ * search of Apache Lucene segments if it has been enabled.
+ */
+public class ConcurrentQueryPhaseSearcher extends DefaultQueryPhaseSearcher {
+ private static final Logger LOGGER = LogManager.getLogger(ConcurrentQueryPhaseSearcher.class);
+
+ /**
+ * Default constructor
+ */
+ public ConcurrentQueryPhaseSearcher() {}
+
+ @Override
+ protected boolean searchWithCollector(
+ SearchContext searchContext,
+ ContextIndexSearcher searcher,
+ Query query,
+        LinkedList<QueryCollectorContext> collectors,
+ boolean hasFilterCollector,
+ boolean hasTimeout
+ ) throws IOException {
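+        // Concurrent segment search requires the searcher to have an executor; otherwise fall back to the sequential collector path.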
+ boolean couldUseConcurrentSegmentSearch = allowConcurrentSegmentSearch(searcher);
+
+ // TODO: support aggregations
+ if (searchContext.aggregations() != null) {
+ couldUseConcurrentSegmentSearch = false;
+ LOGGER.debug("Unable to use concurrent search over index segments (experimental): aggregations are present");
+ }
+
+ if (couldUseConcurrentSegmentSearch) {
+ LOGGER.debug("Using concurrent search over index segments (experimental)");
+ return searchWithCollectorManager(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout);
+ } else {
+ return super.searchWithCollector(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout);
+ }
+ }
+
+ private static boolean searchWithCollectorManager(
+ SearchContext searchContext,
+ ContextIndexSearcher searcher,
+ Query query,
+        LinkedList<QueryCollectorContext> collectorContexts,
+ boolean hasFilterCollector,
+ boolean timeoutSet
+ ) throws IOException {
+ // create the top docs collector last when the other collectors are known
+ final TopDocsCollectorContext topDocsFactory = createTopDocsCollectorContext(searchContext, hasFilterCollector);
+ // add the top docs collector, the first collector context in the chain
+ collectorContexts.addFirst(topDocsFactory);
+
+ final QuerySearchResult queryResult = searchContext.queryResult();
+        final CollectorManager<?, ReduceableSearchResult> collectorManager;
+
+ // TODO: support aggregations in concurrent segment search flow
+ if (searchContext.aggregations() != null) {
+ throw new UnsupportedOperationException("The concurrent segment search does not support aggregations yet");
+ }
+
+ if (searchContext.getProfilers() != null) {
+            final ProfileCollectorManager<? extends Collector, ReduceableSearchResult> profileCollectorManager =
+ QueryCollectorManagerContext.createQueryCollectorManagerWithProfiler(collectorContexts);
+ searchContext.getProfilers().getCurrentQueryProfiler().setCollector(profileCollectorManager);
+ collectorManager = profileCollectorManager;
+ } else {
+ // Create multi collector manager instance
+ collectorManager = QueryCollectorManagerContext.createMultiCollectorManager(collectorContexts);
+ }
+
+ try {
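+            // Execute the query over segment slices in parallel; the collector manager's reduce step merges per-slice
+            // results, and result.reduce(...) publishes them into the shard-level QuerySearchResult.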
+ final ReduceableSearchResult result = searcher.search(query, collectorManager);
+ result.reduce(queryResult);
+ } catch (EarlyTerminatingCollector.EarlyTerminationException e) {
+ queryResult.terminatedEarly(true);
+ } catch (TimeExceededException e) {
+ assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set";
+ if (searchContext.request().allowPartialSearchResults() == false) {
+ // Can't rethrow TimeExceededException because not serializable
+ throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded");
+ }
+ queryResult.searchTimedOut(true);
+ }
+ if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER && queryResult.terminatedEarly() == null) {
+ queryResult.terminatedEarly(false);
+ }
+
+ return topDocsFactory.shouldRescore();
+ }
+
+ private static boolean allowConcurrentSegmentSearch(final ContextIndexSearcher searcher) {
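+        // The executor is non-null only when an index_searcher thread pool provider has been wired into the searcher.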
+ return (searcher.getExecutor() != null);
+ }
+
+}
diff --git a/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/package-info.java b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/package-info.java
new file mode 100644
index 0000000000000..0f98ae7682a84
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/src/main/java/org/opensearch/search/query/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * {@link org.opensearch.search.query.QueryPhaseSearcher} implementation for concurrent search
+ */
+package org.opensearch.search.query;
diff --git a/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java
new file mode 100644
index 0000000000000..51cb3c8c0cddc
--- /dev/null
+++ b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java
@@ -0,0 +1,316 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.query;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LRUQueryCache;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryCachingPolicy;
+import org.apache.lucene.search.QueryVisitor;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.search.RandomApproximationQuery;
+import org.apache.lucene.tests.util.TestUtil;
+import org.opensearch.core.internal.io.IOUtils;
+import org.opensearch.search.internal.ContextIndexSearcher;
+import org.opensearch.search.profile.ProfileResult;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.threadpool.ThreadPool;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+
+public class QueryProfilerTests extends OpenSearchTestCase {
+
+ private Directory dir;
+ private IndexReader reader;
+ private ContextIndexSearcher searcher;
+ private ExecutorService executor;
+
+ @ParametersFactory
+ public static Collection
 *
- * <p>The blob store is written to and read from by master-eligible nodes and data nodes. All metadata related to a snapshot's
+ * <p>The blob store is written to and read from by cluster-manager-eligible nodes and data nodes. All metadata related to a snapshot's
 * scope and health is written by the master node.
 *
 * <p>The data-nodes on the other hand, write the data for each individual shard but do not write any blobs outside of shard directories for
 * shards that they hold the primary of. For each shard, the data-node holding the shard's primary writes the actual data in form of
diff --git a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java
index 4ee209111bdcb..e16e385910d98 100644
--- a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java
+++ b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java
@@ -36,9 +36,12 @@
import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.spell.LevenshteinDistance;
import org.apache.lucene.util.CollectionUtil;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.action.support.master.MasterNodeRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.CheckedConsumer;
import org.opensearch.common.collect.Tuple;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.plugins.ActionPlugin;
@@ -200,6 +203,34 @@ protected Set<String> responseParams() {
return Collections.emptySet();
}
+ /**
+     * Parse the deprecated request parameter 'master_timeout', and log a deprecation warning if the parameter is used.
+     * It also validates that the two request parameters 'master_timeout' and 'cluster_manager_timeout' are not assigned together.
+     * The method is temporarily added in 2.0 while applying inclusive language. Remove the method along with MASTER_ROLE.
+ * @param mnr the action request
+ * @param request the REST request to handle
+ * @param logger the logger that logs deprecation notices
+ * @param logMsgKeyPrefix the key prefix of a deprecation message to avoid duplicate messages.
+ */
+ public static void parseDeprecatedMasterTimeoutParameter(
+ MasterNodeRequest mnr,
+ RestRequest request,
+ DeprecationLogger logger,
+ String logMsgKeyPrefix
+ ) {
+ final String MASTER_TIMEOUT_DEPRECATED_MESSAGE =
+ "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.";
+ final String DUPLICATE_PARAMETER_ERROR_MESSAGE =
+ "Please only use one of the request parameters [master_timeout, cluster_manager_timeout].";
+ if (request.hasParam("master_timeout")) {
+ logger.deprecate(logMsgKeyPrefix + "_master_timeout_parameter", MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ if (request.hasParam("cluster_manager_timeout")) {
+ throw new OpenSearchParseException(DUPLICATE_PARAMETER_ERROR_MESSAGE);
+ }
+ mnr.masterNodeTimeout(request.paramAsTime("master_timeout", mnr.masterNodeTimeout()));
+ }
+ }
+
public static class Wrapper extends BaseRestHandler {
protected final BaseRestHandler delegate;
diff --git a/server/src/main/java/org/opensearch/rest/RestRequest.java b/server/src/main/java/org/opensearch/rest/RestRequest.java
index e04d8faa8af39..7d11da7e122cd 100644
--- a/server/src/main/java/org/opensearch/rest/RestRequest.java
+++ b/server/src/main/java/org/opensearch/rest/RestRequest.java
@@ -54,7 +54,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -579,32 +578,6 @@ public static XContentType parseContentType(List<String> header) {
throw new IllegalArgumentException("empty Content-Type header");
}
- /**
- * The method is only used to validate whether the values of the 2 request parameters "master_timeout" and "cluster_manager_timeout" is the same value or not.
- * If the 2 values are not the same, throw an {@link OpenSearchParseException}.
- * @param keys Names of the request parameters.
- * @deprecated The method will be removed along with the request parameter "master_timeout".
- */
- @Deprecated
- public void validateParamValuesAreEqual(String... keys) {
- // Track the last seen value and ensure that every subsequent value matches it.
- // The value to be tracked is the non-empty values of the parameters with the key.
- String lastSeenValue = null;
- for (String key : keys) {
- String value = param(key);
- if (!Strings.isNullOrEmpty(value)) {
- if (lastSeenValue == null || value.equals(lastSeenValue)) {
- lastSeenValue = value;
- } else {
- throw new OpenSearchParseException(
- "The values of the request parameters: {} are required to be equal, otherwise please only assign value to one of the request parameters.",
- Arrays.toString(keys)
- );
- }
- }
- }
- }
-
public static class ContentTypeHeaderException extends RuntimeException {
ContentTypeHeaderException(final IllegalArgumentException cause) {
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java
index 2371581cefccb..c568eae91a528 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -50,6 +51,8 @@
*/
public class RestCleanupRepositoryAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCleanupRepositoryAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(POST, "/_snapshot/{repository}/_cleanup"));
@@ -64,7 +67,10 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
CleanupRepositoryRequest cleanupRepositoryRequest = cleanupRepositoryRequest(request.param("repository"));
cleanupRepositoryRequest.timeout(request.paramAsTime("timeout", cleanupRepositoryRequest.timeout()));
- cleanupRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", cleanupRepositoryRequest.masterNodeTimeout()));
+ cleanupRepositoryRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", cleanupRepositoryRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(cleanupRepositoryRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().cleanupRepository(cleanupRepositoryRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCloneSnapshotAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCloneSnapshotAction.java
index 5c10c96c17227..abf1efa19f79b 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCloneSnapshotAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCloneSnapshotAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.snapshots.clone.CloneSnapshotRequest;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.support.XContentMapValues;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -52,6 +53,8 @@
*/
public class RestCloneSnapshotAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCloneSnapshotAction.class);
+
@Override
public List<Route> routes() {
return Collections.singletonList(new Route(PUT, "/_snapshot/{repository}/{snapshot}/_clone/{target_snapshot}"));
@@ -71,7 +74,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
request.param("target_snapshot"),
XContentMapValues.nodeStringArrayValue(source.getOrDefault("indices", Collections.emptyList()))
);
- cloneSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", cloneSnapshotRequest.masterNodeTimeout()));
+ cloneSnapshotRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", cloneSnapshotRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(cloneSnapshotRequest, request, deprecationLogger, getName());
cloneSnapshotRequest.indicesOptions(IndicesOptions.fromMap(source, cloneSnapshotRequest.indicesOptions()));
return channel -> client.admin().cluster().cloneSnapshot(cloneSnapshotRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
index 003f1bec11d87..1238cd2a818d5 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
@@ -38,6 +38,7 @@
import org.opensearch.client.Requests;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.ClusterState;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsFilter;
@@ -59,6 +60,8 @@
public class RestClusterGetSettingsAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterGetSettingsAction.class);
+
private final Settings settings;
private final ClusterSettings clusterSettings;
private final SettingsFilter settingsFilter;
@@ -84,7 +87,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest().routingTable(false).nodes(false);
final boolean renderDefaults = request.paramAsBoolean("include_defaults", false);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestBuilderListener<ClusterStateResponse>(channel) {
@Override
public RestResponse buildResponse(ClusterStateResponse response, XContentBuilder builder) throws Exception {
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthAction.java
index 8b782e4399e73..877e387a15ec3 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthAction.java
@@ -39,6 +39,7 @@
import org.opensearch.cluster.health.ClusterHealthStatus;
import org.opensearch.common.Priority;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestStatusToXContentListener;
@@ -56,6 +57,8 @@
public class RestClusterHealthAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterHealthAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cluster/health"), new Route(GET, "/_cluster/health/{index}")));
@@ -81,7 +84,8 @@ public static ClusterHealthRequest fromRequest(final RestRequest request) {
final ClusterHealthRequest clusterHealthRequest = clusterHealthRequest(Strings.splitStringByCommaToArray(request.param("index")));
clusterHealthRequest.indicesOptions(IndicesOptions.fromRequest(request, clusterHealthRequest.indicesOptions()));
clusterHealthRequest.local(request.paramAsBoolean("local", clusterHealthRequest.local()));
- clusterHealthRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterHealthRequest.masterNodeTimeout()));
+ clusterHealthRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterHealthRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterHealthRequest, request, deprecationLogger, "cluster_health");
clusterHealthRequest.timeout(request.paramAsTime("timeout", clusterHealthRequest.timeout()));
String waitForStatus = request.param("wait_for_status");
if (waitForStatus != null) {
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
index f519da109ba09..9c0e09b7629e0 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
@@ -80,7 +80,7 @@ public RestClusterRerouteAction(SettingsFilter settingsFilter) {
}
// TODO: Remove the DeprecationLogger after removing MASTER_ROLE.
- // It's used to log deprecation when request parameter 'metric' contains 'master_node'.
+ // It's used to log deprecation when request parameter 'metric' contains 'master_node', or request parameter 'master_timeout' is used.
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterRerouteAction.class);
private static final String DEPRECATED_MESSAGE_MASTER_NODE =
"Deprecated value [master_node] used for parameter [metric]. To promote inclusive language, please use [cluster_manager_node] instead. It will be unsupported in a future major version.";
@@ -143,7 +143,8 @@ public static ClusterRerouteRequest createRequest(RestRequest request) throws IO
clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain()));
clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout()));
clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed()));
- clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout()));
+ clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterRerouteRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterRerouteRequest, request, deprecationLogger, "cluster_reroute");
request.applyContentParser(parser -> PARSER.parse(parser, clusterRerouteRequest, null));
return clusterRerouteRequest;
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
index 32aa055c18300..7f18a19b5cd54 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
@@ -73,7 +73,7 @@ public RestClusterStateAction(SettingsFilter settingsFilter) {
}
// TODO: Remove the DeprecationLogger after removing MASTER_ROLE.
- // It's used to log deprecation when request parameter 'metric' contains 'master_node'.
+ // It's used to log deprecation when request parameter 'metric' contains 'master_node', or request parameter 'master_timeout' is used.
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterStateAction.class);
private static final String DEPRECATED_MESSAGE_MASTER_NODE =
"Deprecated value [master_node] used for parameter [metric]. To promote inclusive language, please use [cluster_manager_node] instead. It will be unsupported in a future major version.";
@@ -104,7 +104,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
final ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest();
clusterStateRequest.indicesOptions(IndicesOptions.fromRequest(request, clusterStateRequest.indicesOptions()));
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
if (request.hasParam("wait_for_metadata_version")) {
clusterStateRequest.waitForMetadataVersion(request.paramAsLong("wait_for_metadata_version", 0));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
index bbe1bba70926f..c05cdc752b5f7 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.opensearch.client.Requests;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.rest.BaseRestHandler;
@@ -51,6 +52,8 @@
public class RestClusterUpdateSettingsAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterUpdateSettingsAction.class);
+
private static final String PERSISTENT = "persistent";
private static final String TRANSIENT = "transient";
@@ -69,8 +72,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = Requests.clusterUpdateSettingsRequest();
clusterUpdateSettingsRequest.timeout(request.paramAsTime("timeout", clusterUpdateSettingsRequest.timeout()));
clusterUpdateSettingsRequest.masterNodeTimeout(
- request.paramAsTime("master_timeout", clusterUpdateSettingsRequest.masterNodeTimeout())
+ request.paramAsTime("cluster_manager_timeout", clusterUpdateSettingsRequest.masterNodeTimeout())
);
+ parseDeprecatedMasterTimeoutParameter(clusterUpdateSettingsRequest, request, deprecationLogger, getName());
Map<String, Object> source;
try (XContentParser parser = request.contentParser()) {
source = parser.map();
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCreateSnapshotAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCreateSnapshotAction.java
index c53f1d0cd5637..b3503f0dfbb56 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCreateSnapshotAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestCreateSnapshotAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -52,6 +53,8 @@
*/
public class RestCreateSnapshotAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateSnapshotAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -68,7 +71,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot"));
request.applyContentParser(p -> createSnapshotRequest.source(p.mapOrdered()));
- createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout()));
+ createSnapshotRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", createSnapshotRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(createSnapshotRequest, request, deprecationLogger, getName());
createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
return channel -> client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java
index 4f8771b5db171..5e53595e3a49b 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -50,6 +51,8 @@
*/
public class RestDeleteRepositoryAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteRepositoryAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(DELETE, "/_snapshot/{repository}"));
@@ -64,7 +67,10 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteRepositoryRequest deleteRepositoryRequest = deleteRepositoryRequest(request.param("repository"));
deleteRepositoryRequest.timeout(request.paramAsTime("timeout", deleteRepositoryRequest.timeout()));
- deleteRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteRepositoryRequest.masterNodeTimeout()));
+ deleteRepositoryRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", deleteRepositoryRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(deleteRepositoryRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().deleteRepository(deleteRepositoryRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java
index 57b651215bc4f..891b84f8d0869 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -51,6 +52,8 @@
*/
public class RestDeleteSnapshotAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteSnapshotAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(DELETE, "/_snapshot/{repository}/{snapshot}"));
@@ -67,7 +70,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
request.param("repository"),
Strings.splitStringByCommaToArray(request.param("snapshot"))
);
- deleteSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteSnapshotRequest.masterNodeTimeout()));
+ deleteSnapshotRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", deleteSnapshotRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(deleteSnapshotRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().deleteSnapshot(deleteSnapshotRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
index 2a359fa08eb59..780d9266549ae 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsFilter;
import org.opensearch.rest.BaseRestHandler;
@@ -55,6 +56,8 @@
*/
public class RestGetRepositoriesAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetRepositoriesAction.class);
+
private final SettingsFilter settingsFilter;
public RestGetRepositoriesAction(SettingsFilter settingsFilter) {
@@ -75,7 +78,10 @@ public List<Route> routes() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
final String[] repositories = request.paramAsStringArray("repository", Strings.EMPTY_ARRAY);
GetRepositoriesRequest getRepositoriesRequest = getRepositoryRequest(repositories);
- getRepositoriesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRepositoriesRequest.masterNodeTimeout()));
+ getRepositoriesRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", getRepositoriesRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(getRepositoriesRequest, request, deprecationLogger, getName());
getRepositoriesRequest.local(request.paramAsBoolean("local", getRepositoriesRequest.local()));
settingsFilter.addFilterSettingParams(request);
return channel -> client.admin().cluster().getRepositories(getRepositoriesRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetSnapshotsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetSnapshotsAction.java
index 383369ce595c5..189795516adbd 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetSnapshotsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetSnapshotsAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -51,6 +52,8 @@
*/
public class RestGetSnapshotsAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetSnapshotsAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_snapshot/{repository}/{snapshot}"));
@@ -69,7 +72,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
GetSnapshotsRequest getSnapshotsRequest = getSnapshotsRequest(repository).snapshots(snapshots);
getSnapshotsRequest.ignoreUnavailable(request.paramAsBoolean("ignore_unavailable", getSnapshotsRequest.ignoreUnavailable()));
getSnapshotsRequest.verbose(request.paramAsBoolean("verbose", getSnapshotsRequest.verbose()));
- getSnapshotsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSnapshotsRequest.masterNodeTimeout()));
+ getSnapshotsRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getSnapshotsRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getSnapshotsRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().getSnapshots(getSnapshotsRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java
index de80054a9afb2..155adc8cc7e19 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -46,6 +47,8 @@
public class RestPendingClusterTasksAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPendingClusterTasksAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_cluster/pending_tasks"));
@@ -59,7 +62,10 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
PendingClusterTasksRequest pendingClusterTasksRequest = new PendingClusterTasksRequest();
- pendingClusterTasksRequest.masterNodeTimeout(request.paramAsTime("master_timeout", pendingClusterTasksRequest.masterNodeTimeout()));
+ pendingClusterTasksRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", pendingClusterTasksRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(pendingClusterTasksRequest, request, deprecationLogger, getName());
pendingClusterTasksRequest.local(request.paramAsBoolean("local", pendingClusterTasksRequest.local()));
return channel -> client.admin().cluster().pendingClusterTasks(pendingClusterTasksRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutRepositoryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutRepositoryAction.java
index afc4a142b689a..a1f34294d630d 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutRepositoryAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutRepositoryAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -53,6 +54,8 @@
*/
public class RestPutRepositoryAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutRepositoryAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(POST, "/_snapshot/{repository}"), new Route(PUT, "/_snapshot/{repository}")));
@@ -70,7 +73,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
putRepositoryRequest.source(parser.mapOrdered());
}
putRepositoryRequest.verify(request.paramAsBoolean("verify", true));
- putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout()));
+ putRepositoryRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putRepositoryRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(putRepositoryRequest, request, deprecationLogger, getName());
putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout()));
return channel -> client.admin().cluster().putRepository(putRepositoryRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java
index 4f6032d58b633..6c607b48a89fb 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -50,6 +51,8 @@
*/
public class RestRestoreSnapshotAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestRestoreSnapshotAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(POST, "/_snapshot/{repository}/{snapshot}/_restore"));
@@ -63,7 +66,10 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot"));
- restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout()));
+ restoreSnapshotRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", restoreSnapshotRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(restoreSnapshotRequest, request, deprecationLogger, getName());
restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
request.applyContentParser(p -> restoreSnapshotRequest.source(p.mapOrdered()));
return channel -> client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java
index a0db762a68c19..469c1eac348d1 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -52,6 +53,8 @@
*/
public class RestSnapshotsStatusAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSnapshotsStatusAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -78,7 +81,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
SnapshotsStatusRequest snapshotsStatusRequest = snapshotsStatusRequest(repository).snapshots(snapshots);
snapshotsStatusRequest.ignoreUnavailable(request.paramAsBoolean("ignore_unavailable", snapshotsStatusRequest.ignoreUnavailable()));
- snapshotsStatusRequest.masterNodeTimeout(request.paramAsTime("master_timeout", snapshotsStatusRequest.masterNodeTimeout()));
+ snapshotsStatusRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", snapshotsStatusRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(snapshotsStatusRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().snapshotsStatus(snapshotsStatusRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java
index 735325a69c2ed..4792d0e6db148 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -47,6 +48,8 @@
public class RestVerifyRepositoryAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestVerifyRepositoryAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(POST, "/_snapshot/{repository}/_verify"));
@@ -60,7 +63,10 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
VerifyRepositoryRequest verifyRepositoryRequest = verifyRepositoryRequest(request.param("repository"));
- verifyRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", verifyRepositoryRequest.masterNodeTimeout()));
+ verifyRepositoryRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", verifyRepositoryRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(verifyRepositoryRequest, request, deprecationLogger, getName());
verifyRepositoryRequest.timeout(request.paramAsTime("timeout", verifyRepositoryRequest.timeout()));
return channel -> client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java
index 8024d97743cc8..3e8883058d18e 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.dangling.delete.DeleteDanglingIndexRequest;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestStatus;
@@ -49,6 +50,8 @@
public class RestDeleteDanglingIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteDanglingIndexAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(DELETE, "/_dangling/{index_uuid}"));
@@ -67,7 +70,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient
);
deleteRequest.timeout(request.paramAsTime("timeout", deleteRequest.timeout()));
- deleteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteRequest.masterNodeTimeout()));
+ deleteRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", deleteRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(deleteRequest, request, deprecationLogger, getName());
return channel -> client.admin()
.cluster()
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java
index b7ba7361c2980..5a48159bd9651 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java
@@ -42,12 +42,15 @@
import org.opensearch.action.admin.indices.dangling.import_index.ImportDanglingIndexRequest;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestStatus;
import org.opensearch.rest.action.RestToXContentListener;
public class RestImportDanglingIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestImportDanglingIndexAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(POST, "/_dangling/{index_uuid}"));
@@ -66,7 +69,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient
);
importRequest.timeout(request.paramAsTime("timeout", importRequest.timeout()));
- importRequest.masterNodeTimeout(request.paramAsTime("master_timeout", importRequest.masterNodeTimeout()));
+ importRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", importRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(importRequest, request, deprecationLogger, getName());
return channel -> client.admin()
.cluster()
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestAddIndexBlockAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestAddIndexBlockAction.java
index 89faeb1b7c7d9..6854662e3bb18 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestAddIndexBlockAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestAddIndexBlockAction.java
@@ -37,6 +37,7 @@
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -49,6 +50,8 @@
public class RestAddIndexBlockAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestAddIndexBlockAction.class);
+
@Override
public List<Route> routes() {
return Collections.singletonList(new Route(PUT, "/{index}/_block/{block}"));
@@ -65,7 +68,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
IndexMetadata.APIBlock.fromName(request.param("block")),
Strings.splitStringByCommaToArray(request.param("index"))
);
- addIndexBlockRequest.masterNodeTimeout(request.paramAsTime("master_timeout", addIndexBlockRequest.masterNodeTimeout()));
+ addIndexBlockRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", addIndexBlockRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(addIndexBlockRequest, request, deprecationLogger, getName());
addIndexBlockRequest.timeout(request.paramAsTime("timeout", addIndexBlockRequest.timeout()));
addIndexBlockRequest.indicesOptions(IndicesOptions.fromRequest(request, addIndexBlockRequest.indicesOptions()));
return channel -> client.admin().indices().addBlock(addIndexBlockRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCloseIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCloseIndexAction.java
index 8da0ec3c5a349..168d553112fe1 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCloseIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCloseIndexAction.java
@@ -37,6 +37,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -50,6 +51,8 @@
public class RestCloseIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCloseIndexAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(POST, "/_close"), new Route(POST, "/{index}/_close")));
@@ -63,7 +66,8 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
CloseIndexRequest closeIndexRequest = new CloseIndexRequest(Strings.splitStringByCommaToArray(request.param("index")));
- closeIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", closeIndexRequest.masterNodeTimeout()));
+ closeIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", closeIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(closeIndexRequest, request, deprecationLogger, getName());
closeIndexRequest.timeout(request.paramAsTime("timeout", closeIndexRequest.timeout()));
closeIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, closeIndexRequest.indicesOptions()));
String waitForActiveShards = request.param("wait_for_active_shards");
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java
index 5b628bc094c41..54199b8e68516 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.create.CreateIndexRequest;
import org.opensearch.action.support.ActiveShardCount;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.index.mapper.MapperService;
@@ -53,6 +54,8 @@
public class RestCreateIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateIndexAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(PUT, "/{index}"));
@@ -74,7 +77,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
}
createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout()));
- createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout()));
+ createIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", createIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(createIndexRequest, request, deprecationLogger, getName());
createIndexRequest.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));
return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComponentTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComponentTemplateAction.java
index e4f0347192dbe..05656cb250596 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComponentTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComponentTemplateAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.indices.template.delete.DeleteComponentTemplateAction;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -46,6 +47,8 @@
public class RestDeleteComponentTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteComponentTemplateAction.class);
+
@Override
public List<Route> routes() {
return Collections.singletonList(new Route(DELETE, "/_component_template/{name}"));
@@ -60,7 +63,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteComponentTemplateAction.Request deleteReq = new DeleteComponentTemplateAction.Request(request.param("name"));
- deleteReq.masterNodeTimeout(request.paramAsTime("master_timeout", deleteReq.masterNodeTimeout()));
+ deleteReq.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", deleteReq.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(deleteReq, request, deprecationLogger, getName());
return channel -> client.execute(DeleteComponentTemplateAction.INSTANCE, deleteReq, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComposableIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComposableIndexTemplateAction.java
index f5832b4167852..b08288593515b 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComposableIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteComposableIndexTemplateAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -46,6 +47,8 @@
public class RestDeleteComposableIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteComposableIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return Collections.singletonList(new Route(DELETE, "/_index_template/{name}"));
@@ -60,7 +63,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteComposableIndexTemplateAction.Request deleteReq = new DeleteComposableIndexTemplateAction.Request(request.param("name"));
- deleteReq.masterNodeTimeout(request.paramAsTime("master_timeout", deleteReq.masterNodeTimeout()));
+ deleteReq.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", deleteReq.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(deleteReq, request, deprecationLogger, getName());
return channel -> client.execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteReq, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexAction.java
index e8b5caf8f234a..63fa8a4b29d85 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -49,6 +50,8 @@
public class RestDeleteIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteIndexAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(DELETE, "/"), new Route(DELETE, "/{index}")));
@@ -63,7 +66,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(Strings.splitStringByCommaToArray(request.param("index")));
deleteIndexRequest.timeout(request.paramAsTime("timeout", deleteIndexRequest.timeout()));
- deleteIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteIndexRequest.masterNodeTimeout()));
+ deleteIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", deleteIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(deleteIndexRequest, request, deprecationLogger, getName());
deleteIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, deleteIndexRequest.indicesOptions()));
return channel -> client.admin().indices().delete(deleteIndexRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java
index 51040082c2c47..ce49332a4abbd 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java
@@ -33,6 +33,7 @@
import org.opensearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -45,6 +46,8 @@
public class RestDeleteIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(DELETE, "/_template/{name}"));
@@ -58,7 +61,10 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteIndexTemplateRequest deleteIndexTemplateRequest = new DeleteIndexTemplateRequest(request.param("name"));
- deleteIndexTemplateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteIndexTemplateRequest.masterNodeTimeout()));
+ deleteIndexTemplateRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", deleteIndexTemplateRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(deleteIndexTemplateRequest, request, deprecationLogger, getName());
return channel -> client.admin().indices().deleteTemplate(deleteIndexTemplateRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComponentTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComponentTemplateAction.java
index 8a85a66eef635..e245c5a20848f 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComponentTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComponentTemplateAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.indices.template.get.GetComponentTemplateAction;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -52,6 +53,8 @@
public class RestGetComponentTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetComponentTemplateAction.class);
+
@Override
public List<Route> routes() {
return Arrays.asList(
@@ -72,7 +75,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
final GetComponentTemplateAction.Request getRequest = new GetComponentTemplateAction.Request(request.param("name"));
getRequest.local(request.paramAsBoolean("local", getRequest.local()));
- getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout()));
+ getRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getRequest, request, deprecationLogger, getName());
final boolean implicitAll = getRequest.name() == null;
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java
index 684198c6799f0..d860e8856571e 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.indices.template.get.GetComposableIndexTemplateAction;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -52,6 +53,8 @@
public class RestGetComposableIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetComposableIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return Arrays.asList(
@@ -71,7 +74,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
final GetComposableIndexTemplateAction.Request getRequest = new GetComposableIndexTemplateAction.Request(request.param("name"));
getRequest.local(request.paramAsBoolean("local", getRequest.local()));
- getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout()));
+ getRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getRequest, request, deprecationLogger, getName());
final boolean implicitAll = getRequest.name() == null;
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java
index 71e7ed098cf8d..cda29d2b0ad47 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -58,6 +59,8 @@
*/
public class RestGetIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -76,7 +79,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names);
getIndexTemplatesRequest.local(request.paramAsBoolean("local", getIndexTemplatesRequest.local()));
- getIndexTemplatesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexTemplatesRequest.masterNodeTimeout()));
+ getIndexTemplatesRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", getIndexTemplatesRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(getIndexTemplatesRequest, request, deprecationLogger, getName());
final boolean implicitAll = getIndexTemplatesRequest.names().length == 0;
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java
index 37c8162c6d31b..c94d691d4e99b 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -55,6 +56,8 @@
*/
public class RestGetIndicesAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/{index}"), new Route(HEAD, "/{index}")));
@@ -72,7 +75,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
getIndexRequest.indices(indices);
getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions()));
getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local()));
- getIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexRequest.masterNodeTimeout()));
+ getIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getIndexRequest, request, deprecationLogger, getName());
getIndexRequest.humanReadable(request.paramAsBoolean("human", false));
getIndexRequest.includeDefaults(request.paramAsBoolean("include_defaults", false));
return channel -> client.admin().indices().getIndex(getIndexRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java
index f4f33905408e7..86bf6b626c24a 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java
@@ -32,6 +32,7 @@
package org.opensearch.rest.action.admin.indices;
+import org.opensearch.OpenSearchParseException;
import org.opensearch.OpenSearchTimeoutException;
import org.opensearch.action.ActionRunnable;
import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest;
@@ -39,6 +40,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.rest.BaseRestHandler;
@@ -59,6 +61,12 @@
public class RestGetMappingAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetMappingAction.class);
+ private static final String MASTER_TIMEOUT_DEPRECATED_MESSAGE =
+ "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.";
+ private static final String DUPLICATE_PARAMETER_ERROR_MESSAGE =
+ "Please only use one of the request parameters [master_timeout, cluster_manager_timeout].";
+
private final ThreadPool threadPool;
public RestGetMappingAction(ThreadPool threadPool) {
@@ -89,7 +97,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
final GetMappingsRequest getMappingsRequest = new GetMappingsRequest();
getMappingsRequest.indices(indices);
getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions()));
- final TimeValue timeout = request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout());
+ TimeValue clusterManagerTimeout = request.paramAsTime("cluster_manager_timeout", getMappingsRequest.masterNodeTimeout());
+ // TODO: Remove the if condition and statements inside after removing MASTER_ROLE.
+ if (request.hasParam("master_timeout")) {
+ deprecationLogger.deprecate("get_mapping_master_timeout_parameter", MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ if (request.hasParam("cluster_manager_timeout")) {
+ throw new OpenSearchParseException(DUPLICATE_PARAMETER_ERROR_MESSAGE);
+ }
+ clusterManagerTimeout = request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout());
+ }
+ final TimeValue timeout = clusterManagerTimeout;
getMappingsRequest.masterNodeTimeout(timeout);
getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local()));
return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestActionListener<GetMappingsResponse>(channel) {
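
Note: RestGetMappingAction above is the one handler that inlines this logic rather than calling the shared helper, apparently because it needs the resolved value as a local TimeValue (the "final TimeValue timeout" above) for its own timeout handling in the response listener. Caller-visible behavior is the same for every endpoint touched by this patch; a hypothetical probe with the low-level REST client (host and index name are made up) would see the deprecated spelling still accepted, with the notice surfaced in the Warning response header:

    import java.util.List;

    import org.apache.http.HttpHost;
    import org.opensearch.client.Request;
    import org.opensearch.client.Response;
    import org.opensearch.client.RestClient;

    public class DeprecatedTimeoutProbe {
        public static void main(String[] args) throws Exception {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                Request get = new Request("GET", "/my-index/_mapping");
                get.addParameter("master_timeout", "30s"); // deprecated spelling, still accepted
                Response response = client.performRequest(get);
                // The deprecation notice comes back in the "Warning" header.
                List<String> warnings = response.getWarnings();
                warnings.forEach(System.out::println);
                // Setting master_timeout and cluster_manager_timeout together is
                // rejected server-side (OpenSearchParseException, a 400 response).
            }
        }
    }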
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetSettingsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetSettingsAction.java
index 589bdfdbe79dd..293078b3568e3 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetSettingsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetSettingsAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -49,6 +50,8 @@
public class RestGetSettingsAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetSettingsAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -79,7 +82,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
.includeDefaults(renderDefaults)
.names(names);
getSettingsRequest.local(request.paramAsBoolean("local", getSettingsRequest.local()));
- getSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSettingsRequest.masterNodeTimeout()));
+ getSettingsRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getSettingsRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getSettingsRequest, request, deprecationLogger, getName());
return channel -> client.admin().indices().getSettings(getSettingsRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java
index 6a8098dfaf633..9f0b543a456f2 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -48,6 +49,8 @@
public class RestIndexDeleteAliasesAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestIndexDeleteAliasesAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(DELETE, "/{index}/_alias/{name}"), new Route(DELETE, "/{index}/_aliases/{name}")));
@@ -65,7 +68,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
indicesAliasesRequest.addAliasAction(AliasActions.remove().indices(indices).aliases(aliases));
- indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(indicesAliasesRequest, request, deprecationLogger, getName());
return channel -> client.admin().indices().aliases(indicesAliasesRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexPutAliasAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexPutAliasAction.java
index cc31dc1117946..8f8eeaf1c8e1c 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexPutAliasAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndexPutAliasAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -51,6 +52,8 @@
public class RestIndexPutAliasAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestIndexPutAliasAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -124,7 +127,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
- indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(indicesAliasesRequest, request, deprecationLogger, getName());
IndicesAliasesRequest.AliasActions aliasAction = AliasActions.add().indices(indices).alias(alias);
if (routing != null) {
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesAliasesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesAliasesAction.java
index 138343a2e7335..20b67fa73a3f5 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesAliasesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesAliasesAction.java
@@ -34,6 +34,7 @@
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -47,6 +48,8 @@
public class RestIndicesAliasesAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestIndicesAliasesAction.class);
+
@Override
public String getName() {
return "indices_aliases_action";
@@ -60,7 +63,8 @@ public List<Route> routes() {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
- indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", indicesAliasesRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(indicesAliasesRequest, request, deprecationLogger, getName());
indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
try (XContentParser parser = request.contentParser()) {
IndicesAliasesRequest.PARSER.parse(parser, indicesAliasesRequest, null);
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestOpenIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestOpenIndexAction.java
index 4b6450c35233f..a6d434b1e865e 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestOpenIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestOpenIndexAction.java
@@ -37,6 +37,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -50,6 +51,8 @@
public class RestOpenIndexAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestOpenIndexAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(POST, "/_open"), new Route(POST, "/{index}/_open")));
@@ -64,7 +67,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
OpenIndexRequest openIndexRequest = new OpenIndexRequest(Strings.splitStringByCommaToArray(request.param("index")));
openIndexRequest.timeout(request.paramAsTime("timeout", openIndexRequest.timeout()));
- openIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", openIndexRequest.masterNodeTimeout()));
+ openIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", openIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(openIndexRequest, request, deprecationLogger, getName());
openIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, openIndexRequest.indicesOptions()));
String waitForActiveShards = request.param("wait_for_active_shards");
if (waitForActiveShards != null) {
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComponentTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComponentTemplateAction.java
index ab02eef51a072..0d956b4dd147f 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComponentTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComponentTemplateAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.template.put.PutComponentTemplateAction;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.ComponentTemplate;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -48,6 +49,8 @@
public class RestPutComponentTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutComponentTemplateAction.class);
+
@Override
public List<Route> routes() {
return Arrays.asList(new Route(POST, "/_component_template/{name}"), new Route(PUT, "/_component_template/{name}"));
@@ -62,7 +65,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
PutComponentTemplateAction.Request putRequest = new PutComponentTemplateAction.Request(request.param("name"));
- putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout()));
+ putRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(putRequest, request, deprecationLogger, getName());
putRequest.create(request.paramAsBoolean("create", false));
putRequest.cause(request.param("cause", "api"));
putRequest.componentTemplate(ComponentTemplate.parse(request.contentParser()));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java
index 790aad33008b9..63352a73b281d 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.template.put.PutComposableIndexTemplateAction;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.ComposableIndexTemplate;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -48,6 +49,8 @@
public class RestPutComposableIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutComposableIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return Arrays.asList(new Route(POST, "/_index_template/{name}"), new Route(PUT, "/_index_template/{name}"));
@@ -62,7 +65,8 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
PutComposableIndexTemplateAction.Request putRequest = new PutComposableIndexTemplateAction.Request(request.param("name"));
- putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout()));
+ putRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(putRequest, request, deprecationLogger, getName());
putRequest.create(request.paramAsBoolean("create", false));
putRequest.cause(request.param("cause", "api"));
putRequest.indexTemplate(ComposableIndexTemplate.parse(request.contentParser()));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java
index f17ac495b494b..42cd8e8103a18 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java
@@ -78,7 +78,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
putRequest.patterns(Arrays.asList(request.paramAsStringArray("index_patterns", Strings.EMPTY_ARRAY)));
}
putRequest.order(request.paramAsInt("order", putRequest.order()));
- putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout()));
+ putRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(putRequest, request, deprecationLogger, getName());
putRequest.create(request.paramAsBoolean("create", false));
putRequest.cause(request.param("cause", ""));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java
index f65dea1ebe3d2..5c305b7c75038 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.rest.BaseRestHandler;
@@ -54,6 +55,8 @@
public class RestPutMappingAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutMappingAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(
@@ -83,7 +86,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
putMappingRequest.source(sourceAsMap);
putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout()));
- putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout()));
+ putMappingRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putMappingRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(putMappingRequest, request, deprecationLogger, getName());
putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions()));
putMappingRequest.writeIndexOnly(request.paramAsBoolean("write_index_only", false));
return channel -> client.admin().indices().putMapping(putMappingRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestResizeHandler.java
index a0d479890f2d0..4168c7ad48e7c 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestResizeHandler.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestResizeHandler.java
@@ -91,7 +91,8 @@ public final RestChannelConsumer prepareRequest(final RestRequest request, final
resizeRequest.setCopySettings(copySettings);
request.applyContentParser(resizeRequest::fromXContent);
resizeRequest.timeout(request.paramAsTime("timeout", resizeRequest.timeout()));
- resizeRequest.masterNodeTimeout(request.paramAsTime("master_timeout", resizeRequest.masterNodeTimeout()));
+ resizeRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", resizeRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(resizeRequest, request, deprecationLogger, getName());
resizeRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));
return channel -> client.admin().indices().resizeIndex(resizeRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java
index 08b84cc6fe6cc..b9f8b936ff23e 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java
@@ -72,7 +72,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(parser));
rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false));
rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout()));
- rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout()));
+ rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", rolloverIndexRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(rolloverIndexRequest, request, deprecationLogger, getName());
rolloverIndexRequest.getCreateIndexRequest()
.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));
return channel -> client.admin().indices().rolloverIndex(rolloverIndexRequest, new RestToXContentListener<>(channel));
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java
index 596b96c28be41..fb78b7b0a826d 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java
@@ -37,6 +37,7 @@
import org.opensearch.action.admin.indices.template.put.PutComposableIndexTemplateAction;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.ComposableIndexTemplate;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -48,6 +49,8 @@
public class RestSimulateIndexTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSimulateIndexTemplateAction.class);
+
@Override
public List<Route> routes() {
return org.opensearch.common.collect.List.of(new Route(POST, "/_index_template/_simulate_index/{name}"));
@@ -62,8 +65,9 @@ public String getName() {
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
SimulateIndexTemplateRequest simulateIndexTemplateRequest = new SimulateIndexTemplateRequest(request.param("name"));
simulateIndexTemplateRequest.masterNodeTimeout(
- request.paramAsTime("master_timeout", simulateIndexTemplateRequest.masterNodeTimeout())
+ request.paramAsTime("cluster_manager_timeout", simulateIndexTemplateRequest.masterNodeTimeout())
);
+ parseDeprecatedMasterTimeoutParameter(simulateIndexTemplateRequest, request, deprecationLogger, getName());
if (request.hasContent()) {
PutComposableIndexTemplateAction.Request indexTemplateRequest = new PutComposableIndexTemplateAction.Request(
"simulating_template"
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateTemplateAction.java
index c3f0958f62718..58cc0c1e369d8 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateTemplateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestSimulateTemplateAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.admin.indices.template.put.PutComposableIndexTemplateAction;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.ComposableIndexTemplate;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@@ -47,6 +48,8 @@
import static org.opensearch.rest.RestRequest.Method.POST;
public class RestSimulateTemplateAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSimulateTemplateAction.class);
+
@Override
public List<Route> routes() {
return Arrays.asList(new Route(POST, "/_index_template/_simulate"), new Route(POST, "/_index_template/_simulate/{name}"));
@@ -58,7 +61,7 @@ public String getName() {
}
@Override
- protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request();
simulateRequest.templateName(request.param("name"));
if (request.hasContent()) {
@@ -71,7 +74,8 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
simulateRequest.indexTemplateRequest(indexTemplateRequest);
}
- simulateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", simulateRequest.masterNodeTimeout()));
+ simulateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", simulateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(simulateRequest, request, deprecationLogger, getName());
return channel -> client.execute(SimulateTemplateAction.INSTANCE, simulateRequest, new RestToXContentListener<>(channel));
}
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestUpdateSettingsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestUpdateSettingsAction.java
index 8356901dbc7ab..bfb634d421f2d 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestUpdateSettingsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestUpdateSettingsAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -52,6 +53,8 @@
public class RestUpdateSettingsAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateSettingsAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(PUT, "/{index}/_settings"), new Route(PUT, "/_settings")));
@@ -67,7 +70,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
UpdateSettingsRequest updateSettingsRequest = updateSettingsRequest(Strings.splitStringByCommaToArray(request.param("index")));
updateSettingsRequest.timeout(request.paramAsTime("timeout", updateSettingsRequest.timeout()));
updateSettingsRequest.setPreserveExisting(request.paramAsBoolean("preserve_existing", updateSettingsRequest.isPreserveExisting()));
- updateSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", updateSettingsRequest.masterNodeTimeout()));
+ updateSettingsRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", updateSettingsRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(updateSettingsRequest, request, deprecationLogger, getName());
updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions()));
updateSettingsRequest.fromXContent(request.contentParser());
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java
index a74d64d1530c5..eb03b9e25a294 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java
@@ -44,6 +44,7 @@
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.common.Strings;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
@@ -58,6 +59,8 @@
public class RestAllocationAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestAllocationAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/allocation"), new Route(GET, "/_cat/allocation/{nodes}")));
@@ -79,7 +82,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().routingTable(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
index 2c0eef6a8fdb8..1b70603edf6e1 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
@@ -32,6 +32,7 @@
package org.opensearch.rest.action.cat;
+import org.opensearch.OpenSearchParseException;
import org.opensearch.action.ActionListener;
import org.opensearch.action.ActionResponse;
import org.opensearch.action.admin.cluster.health.ClusterHealthRequest;
@@ -52,6 +53,7 @@
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.unit.TimeValue;
@@ -82,6 +84,11 @@
public class RestIndicesAction extends AbstractCatAction {
private static final DateFormatter STRICT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time");
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestIndicesAction.class);
+ private static final String MASTER_TIMEOUT_DEPRECATED_MESSAGE =
+ "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.";
+ private static final String DUPLICATE_PARAMETER_ERROR_MESSAGE =
+ "Please only use one of the request parameters [master_timeout, cluster_manager_timeout].";
@Override
public List<Route> routes() {
@@ -109,7 +116,16 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
final IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, IndicesOptions.strictExpand());
final boolean local = request.paramAsBoolean("local", false);
- final TimeValue masterNodeTimeout = request.paramAsTime("master_timeout", DEFAULT_MASTER_NODE_TIMEOUT);
+ TimeValue clusterManagerTimeout = request.paramAsTime("cluster_manager_timeout", DEFAULT_MASTER_NODE_TIMEOUT);
+ // Remove this 'if' block and its contents once MASTER_ROLE is removed.
+ if (request.hasParam("master_timeout")) {
+ deprecationLogger.deprecate("cat_indices_master_timeout_parameter", MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ if (request.hasParam("cluster_manager_timeout")) {
+ throw new OpenSearchParseException(DUPLICATE_PARAMETER_ERROR_MESSAGE);
+ }
+ clusterManagerTimeout = request.paramAsTime("master_timeout", DEFAULT_MASTER_NODE_TIMEOUT);
+ }
+ final TimeValue clusterManagerNodeTimeout = clusterManagerTimeout;
final boolean includeUnloadedSegments = request.paramAsBoolean("include_unloaded_segments", false);
return channel -> {
@@ -120,56 +136,66 @@ public RestResponse buildResponse(final Table table) throws Exception {
}
});
- sendGetSettingsRequest(indices, indicesOptions, local, masterNodeTimeout, client, new ActionListener<GetSettingsResponse>() {
- @Override
- public void onResponse(final GetSettingsResponse getSettingsResponse) {
- final GroupedActionListener<ActionResponse> groupedListener = createGroupedListener(request, 4, listener);
- groupedListener.onResponse(getSettingsResponse);
-
- // The list of indices that will be returned is determined by the indices returned from the Get Settings call.
- // All the other requests just provide additional detail, and wildcards may be resolved differently depending on the
- // type of request in the presence of security plugins (looking at you, ClusterHealthRequest), so
- // force the IndicesOptions for all the sub-requests to be as inclusive as possible.
- final IndicesOptions subRequestIndicesOptions = IndicesOptions.lenientExpandHidden();
-
- // Indices that were successfully resolved during the get settings request might be deleted when the subsequent cluster
- // state, cluster health and indices stats requests execute. We have to distinguish two cases:
- // 1) the deleted index was explicitly passed as parameter to the /_cat/indices request. In this case we want the
- // subsequent requests to fail.
- // 2) the deleted index was resolved as part of a wildcard or _all. In this case, we want the subsequent requests not to
- // fail on the deleted index (as we want to ignore wildcards that cannot be resolved).
- // This behavior can be ensured by letting the cluster state, cluster health and indices stats requests re-resolve the
- // index names with the same indices options that we used for the initial cluster state request (strictExpand).
- sendIndicesStatsRequest(
- indices,
- subRequestIndicesOptions,
- includeUnloadedSegments,
- client,
- ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
- );
- sendClusterStateRequest(
- indices,
- subRequestIndicesOptions,
- local,
- masterNodeTimeout,
- client,
- ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
- );
- sendClusterHealthRequest(
- indices,
- subRequestIndicesOptions,
- local,
- masterNodeTimeout,
- client,
- ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
- );
- }
-
- @Override
- public void onFailure(final Exception e) {
- listener.onFailure(e);
+ sendGetSettingsRequest(
+ indices,
+ indicesOptions,
+ local,
+ clusterManagerNodeTimeout,
+ client,
+ new ActionListener<GetSettingsResponse>() {
+ @Override
+ public void onResponse(final GetSettingsResponse getSettingsResponse) {
+ final GroupedActionListener<ActionResponse> groupedListener = createGroupedListener(request, 4, listener);
+ groupedListener.onResponse(getSettingsResponse);
+
+ // The list of indices that will be returned is determined by the indices returned from the Get Settings call.
+ // All the other requests just provide additional detail, and wildcards may be resolved differently depending on the
+ // type of request in the presence of security plugins (looking at you, ClusterHealthRequest), so
+ // force the IndicesOptions for all the sub-requests to be as inclusive as possible.
+ final IndicesOptions subRequestIndicesOptions = IndicesOptions.lenientExpandHidden();
+
+ // Indices that were successfully resolved during the get settings request might be deleted when the subsequent
+ // cluster state, cluster health and indices stats requests execute. We have to distinguish two cases:
+ // 1) the deleted index was explicitly passed as parameter to the /_cat/indices request. In this case we want the
+ // subsequent requests to fail.
+ // 2) the deleted index was resolved as part of a wildcard or _all. In this case, we want the subsequent requests
+ // not to fail on the deleted index (as we want to ignore wildcards that cannot be resolved).
+ // This behavior can be ensured by letting the cluster state, cluster health and indices stats requests re-resolve
+ // the index names with the same indices options that we used for the initial cluster state request (strictExpand).
+ sendIndicesStatsRequest(
+ indices,
+ subRequestIndicesOptions,
+ includeUnloadedSegments,
+ client,
+ ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
+ );
+ sendClusterStateRequest(
+ indices,
+ subRequestIndicesOptions,
+ local,
+ clusterManagerNodeTimeout,
+ client,
+ ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
+ );
+ sendClusterHealthRequest(
+ indices,
+ subRequestIndicesOptions,
+ local,
+ clusterManagerNodeTimeout,
+ client,
+ ActionListener.wrap(groupedListener::onResponse, groupedListener::onFailure)
+ );
+ }
+
+ @Override
+ public void onFailure(final Exception e) {
+ listener.onFailure(e);
+ }
}
- });
+ );
};
}
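
_cat/indices is the one handler that cannot route a single cluster-manager request through the shared helper, because the resolved timeout feeds several sub-requests, so it inlines the check and is stricter: sending both parameter names is rejected outright, even when the values match. A hypothetical test sketch of that behavior (FakeRestRequest, xContentRegistry and expectThrows are from the test framework; the wiring here is assumed):

    // Hypothetical unit-test sketch for the inlined _cat/indices handling.
    Map<String, String> params = new HashMap<>();
    params.put("master_timeout", "30s");
    params.put("cluster_manager_timeout", "30s");
    RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
    // Both names present -> OpenSearchParseException, even though the values are equal:
    expectThrows(OpenSearchParseException.class, () -> action.doCatRequest(restRequest, client));
    // Only master_timeout present -> accepted, with the deprecation warning logged instead.
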
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestMasterAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestMasterAction.java
index 1219b419122c6..4bcb16c741ecf 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestMasterAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestMasterAction.java
@@ -38,6 +38,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
import org.opensearch.rest.action.RestResponseListener;
@@ -49,6 +50,8 @@
public class RestMasterAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMasterAction.class);
+
@Override
public List<ReplacedRoute> replacedRoutes() {
// The deprecated path will be removed in a future major version.
@@ -70,7 +73,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().nodes(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestResponseListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java
index d6fc27a2713db..6ea36267d39ed 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java
@@ -42,6 +42,7 @@
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.Strings;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.monitor.process.ProcessInfo;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
@@ -56,6 +57,8 @@
public class RestNodeAttrsAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestNodeAttrsAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_cat/nodeattrs"));
@@ -76,7 +79,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().nodes(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
index abc4b48b01cff..3052a9736f9a3 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
@@ -86,8 +86,6 @@ public class RestNodesAction extends AbstractCatAction {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestNodesAction.class);
static final String LOCAL_DEPRECATED_MESSAGE = "Deprecated parameter [local] used. This parameter does not cause this API to act "
+ "locally, and should not be used. It will be unsupported in version 8.0.";
- static final String MASTER_TIMEOUT_DEPRECATED_MESSAGE =
- "Deprecated parameter [master_timeout] used. To promote inclusive language, please use [cluster_manager_timeout] instead. It will be unsupported in a future major version.";
@Override
public List<Route> routes() {
@@ -113,7 +111,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
}
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
- parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request);
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
final boolean fullId = request.paramAsBoolean("full_id", false);
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
@@ -529,20 +527,4 @@ Table buildTable(
private short calculatePercentage(long used, long max) {
return max <= 0 ? 0 : (short) ((100d * used) / max);
}
-
- /**
- * Parse the deprecated request parameter 'master_timeout', and add deprecated log if the parameter is used.
- * It also validates whether the value of 'master_timeout' is the same with 'cluster_manager_timeout'.
- * Remove the method along with MASTER_ROLE.
- * @deprecated As of 2.0, because promoting inclusive language.
- */
- @Deprecated
- private void parseDeprecatedMasterTimeoutParameter(ClusterStateRequest clusterStateRequest, RestRequest request) {
- final String deprecatedTimeoutParam = "master_timeout";
- if (request.hasParam(deprecatedTimeoutParam)) {
- deprecationLogger.deprecate("cat_nodes_master_timeout_parameter", MASTER_TIMEOUT_DEPRECATED_MESSAGE);
- request.validateParamValuesAreEqual(deprecatedTimeoutParam, "cluster_manager_timeout");
- clusterStateRequest.masterNodeTimeout(request.paramAsTime(deprecatedTimeoutParam, clusterStateRequest.masterNodeTimeout()));
- }
- }
}
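
The private helper deleted above is superseded by a shared variant that the new call sites invoke with four arguments (the request, the RestRequest, a DeprecationLogger, and the action name). Its definition is not part of these hunks; a plausible sketch, generalized from the removed method and assuming it now lives in BaseRestHandler and accepts any MasterNodeRequest:

    // Sketch only: generalized from the private method removed above.
    // MASTER_TIMEOUT_DEPRECATED_MESSAGE is assumed to be a shared constant.
    public static void parseDeprecatedMasterTimeoutParameter(
        MasterNodeRequest<?> mnr,
        RestRequest request,
        DeprecationLogger logger,
        String source
    ) {
        final String deprecatedTimeoutParam = "master_timeout";
        if (request.hasParam(deprecatedTimeoutParam)) {
            logger.deprecate(source + "_master_timeout_parameter", MASTER_TIMEOUT_DEPRECATED_MESSAGE);
            request.validateParamValuesAreEqual(deprecatedTimeoutParam, "cluster_manager_timeout");
            mnr.masterNodeTimeout(request.paramAsTime(deprecatedTimeoutParam, mnr.masterNodeTimeout()));
        }
    }
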
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestPendingClusterTasksAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestPendingClusterTasksAction.java
index dbc1b21fcf0ab..7e261510ee742 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestPendingClusterTasksAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestPendingClusterTasksAction.java
@@ -37,6 +37,7 @@
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.service.PendingClusterTask;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
import org.opensearch.rest.action.RestResponseListener;
@@ -48,6 +49,8 @@
public class RestPendingClusterTasksAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPendingClusterTasksAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_cat/pending_tasks"));
@@ -66,7 +69,10 @@ protected void documentation(StringBuilder sb) {
@Override
public RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) {
PendingClusterTasksRequest pendingClusterTasksRequest = new PendingClusterTasksRequest();
- pendingClusterTasksRequest.masterNodeTimeout(request.paramAsTime("master_timeout", pendingClusterTasksRequest.masterNodeTimeout()));
+ pendingClusterTasksRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", pendingClusterTasksRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(pendingClusterTasksRequest, request, deprecationLogger, getName());
pendingClusterTasksRequest.local(request.paramAsBoolean("local", pendingClusterTasksRequest.local()));
return channel -> client.admin()
.cluster()
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestPluginsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestPluginsAction.java
index 79cac0f906c74..8975c8ab4b3de 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestPluginsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestPluginsAction.java
@@ -42,6 +42,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.plugins.PluginInfo;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
@@ -55,6 +56,8 @@
public class RestPluginsAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPluginsAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_cat/plugins"));
@@ -75,7 +78,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().nodes(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestRepositoriesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestRepositoriesAction.java
index fec3996506fca..76c71bee791ef 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestRepositoriesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestRepositoriesAction.java
@@ -37,6 +37,7 @@
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.RepositoryMetadata;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
import org.opensearch.rest.action.RestResponseListener;
@@ -51,16 +52,21 @@
*/
public class RestRepositoriesAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestRepositoriesAction.class);
+
@Override
public List<Route> routes() {
return singletonList(new Route(GET, "/_cat/repositories"));
}
@Override
- protected RestChannelConsumer doCatRequest(RestRequest request, NodeClient client) {
+ public RestChannelConsumer doCatRequest(RestRequest request, NodeClient client) {
GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest();
getRepositoriesRequest.local(request.paramAsBoolean("local", getRepositoriesRequest.local()));
- getRepositoriesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRepositoriesRequest.masterNodeTimeout()));
+ getRepositoriesRequest.masterNodeTimeout(
+ request.paramAsTime("cluster_manager_timeout", getRepositoriesRequest.masterNodeTimeout())
+ );
+ parseDeprecatedMasterTimeoutParameter(getRepositoriesRequest, request, deprecationLogger, getName());
return channel -> client.admin()
.cluster()
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestSegmentsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestSegmentsAction.java
index 8d9d1937bdf56..3a31fdab125b1 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestSegmentsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestSegmentsAction.java
@@ -43,6 +43,7 @@
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.Strings;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.index.engine.Segment;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
@@ -58,6 +59,8 @@
public class RestSegmentsAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSegmentsAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/segments"), new Route(GET, "/_cat/segments/{index}")));
@@ -74,12 +77,13 @@ public boolean allowSystemIndexAccessByDefault() {
}
@Override
- protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) {
+ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) {
final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
clusterStateRequest.clear().nodes(true).routingTable(true).indices(indices);
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
index f9aa1a5554e9e..32e2ca8481f83 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
@@ -43,6 +43,7 @@
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.common.Strings;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.index.cache.query.QueryCacheStats;
import org.opensearch.index.engine.CommitStats;
@@ -75,6 +76,8 @@
public class RestShardsAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestShardsAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/shards"), new Route(GET, "/_cat/shards/{index}")));
@@ -101,7 +104,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
clusterStateRequest.clear().nodes(true).routingTable(true).indices(indices);
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestSnapshotAction.java
index 55e20bc59aeac..5adb9228d9869 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestSnapshotAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestSnapshotAction.java
@@ -36,6 +36,7 @@
import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.rest.RestRequest;
@@ -58,6 +59,8 @@
*/
public class RestSnapshotAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSnapshotAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/snapshots"), new Route(GET, "/_cat/snapshots/{repository}")));
@@ -69,13 +72,14 @@ public String getName() {
}
@Override
- protected RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) {
+ public RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) {
GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest().repository(request.param("repository"))
.snapshots(new String[] { GetSnapshotsRequest.ALL_SNAPSHOTS });
getSnapshotsRequest.ignoreUnavailable(request.paramAsBoolean("ignore_unavailable", getSnapshotsRequest.ignoreUnavailable()));
- getSnapshotsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSnapshotsRequest.masterNodeTimeout()));
+ getSnapshotsRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getSnapshotsRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(getSnapshotsRequest, request, deprecationLogger, getName());
return channel -> client.admin()
.cluster()
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java
index b87205593ce87..a6624c2f8cfdc 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java
@@ -137,6 +137,7 @@ protected Table getTableWithHeader(final RestRequest request) {
// Task detailed info
if (detailed) {
table.addCell("description", "default:true;alias:desc;desc:task action");
+ table.addCell("resource_stats", "default:false;desc:resource consumption info of the task");
}
table.endHeaders();
return table;
@@ -173,6 +174,7 @@ private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNo
if (detailed) {
table.addCell(taskInfo.getDescription());
+ table.addCell(taskInfo.getResourceStats());
}
table.endRow();
}
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestTemplatesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestTemplatesAction.java
index fcead3a34e8a9..bedc57453cb9b 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestTemplatesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestTemplatesAction.java
@@ -40,6 +40,7 @@
import org.opensearch.cluster.metadata.ComposableIndexTemplate;
import org.opensearch.cluster.metadata.Metadata;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.regex.Regex;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
@@ -54,6 +55,8 @@
public class RestTemplatesAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestTemplatesAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/templates"), new Route(GET, "/_cat/templates/{name}")));
@@ -70,12 +73,13 @@ protected void documentation(StringBuilder sb) {
}
@Override
- protected RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) {
+ public RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) {
final String matchPattern = request.hasParam("name") ? request.param("name") : null;
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().metadata(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestResponseListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestThreadPoolAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestThreadPoolAction.java
index 1198fe0bdcba3..2f43a3a66d01b 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestThreadPoolAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestThreadPoolAction.java
@@ -44,6 +44,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.Table;
+import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.regex.Regex;
import org.opensearch.monitor.process.ProcessInfo;
import org.opensearch.rest.RestRequest;
@@ -68,6 +69,8 @@
public class RestThreadPoolAction extends AbstractCatAction {
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestThreadPoolAction.class);
+
@Override
public List<Route> routes() {
return unmodifiableList(asList(new Route(GET, "/_cat/thread_pool"), new Route(GET, "/_cat/thread_pool/{thread_pool_patterns}")));
@@ -89,7 +92,8 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
clusterStateRequest.clear().nodes(true);
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
- clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
+ clusterStateRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", clusterStateRequest.masterNodeTimeout()));
+ parseDeprecatedMasterTimeoutParameter(clusterStateRequest, request, deprecationLogger, getName());
return channel -> client.admin().cluster().state(clusterStateRequest, new RestActionListener<ClusterStateResponse>(channel) {
@Override
diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
index bfe8eed05ea9b..6fd78b834344d 100644
--- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
+++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
@@ -36,6 +36,7 @@
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
@@ -82,6 +83,7 @@
import org.opensearch.search.profile.Profilers;
import org.opensearch.search.query.QueryPhaseExecutionException;
import org.opensearch.search.query.QuerySearchResult;
+import org.opensearch.search.query.ReduceableSearchResult;
import org.opensearch.search.rescore.RescoreContext;
import org.opensearch.search.slice.SliceBuilder;
import org.opensearch.search.sort.SortAndFormats;
@@ -163,7 +165,7 @@ final class DefaultSearchContext extends SearchContext {
private Profilers profilers;
private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
- private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
+ private final Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers = new HashMap<>();
private final QueryShardContext queryShardContext;
private final FetchPhase fetchPhase;
@@ -823,8 +825,8 @@ public long getRelativeTimeInMillis() {
}
@Override
- public Map<Class<?>, Collector> queryCollectors() {
- return queryCollectors;
+ public Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers() {
+ return queryCollectorManagers;
}
@Override
diff --git a/server/src/main/java/org/opensearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/opensearch/search/aggregations/AggregationPhase.java
index be62b33adb356..5a837a6e14c5a 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/AggregationPhase.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/AggregationPhase.java
@@ -32,6 +32,7 @@
package org.opensearch.search.aggregations;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.Query;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.lucene.search.Queries;
@@ -40,9 +41,11 @@
import org.opensearch.search.profile.query.CollectorResult;
import org.opensearch.search.profile.query.InternalProfileCollector;
import org.opensearch.search.query.QueryPhaseExecutionException;
+import org.opensearch.search.query.ReduceableSearchResult;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -68,17 +71,18 @@ public void preProcess(SearchContext context) {
}
context.aggregations().aggregators(aggregators);
if (!collectors.isEmpty()) {
- Collector collector = MultiBucketCollector.wrap(collectors);
- ((BucketCollector) collector).preCollection();
- if (context.getProfilers() != null) {
- collector = new InternalProfileCollector(
- collector,
- CollectorResult.REASON_AGGREGATION,
- // TODO: report on child aggs as well
- Collections.emptyList()
- );
- }
- context.queryCollectors().put(AggregationPhase.class, collector);
+ final Collector collector = createCollector(context, collectors);
+ context.queryCollectorManagers().put(AggregationPhase.class, new CollectorManager<Collector, ReduceableSearchResult>() {
+ @Override
+ public Collector newCollector() throws IOException {
+ return collector;
+ }
+
+ @Override
+ public ReduceableSearchResult reduce(Collection<Collector> collectors) throws IOException {
+ throw new UnsupportedOperationException("The concurrent aggregation over index segments is not supported");
+ }
+ });
}
} catch (IOException e) {
throw new AggregationInitializationException("Could not initialize aggregators", e);
@@ -147,6 +151,20 @@ public void execute(SearchContext context) {
// disable aggregations so that they don't run on next pages in case of scrolling
context.aggregations(null);
- context.queryCollectors().remove(AggregationPhase.class);
+ context.queryCollectorManagers().remove(AggregationPhase.class);
+ }
+
+ private Collector createCollector(SearchContext context, List<Collector> collectors) throws IOException {
+ Collector collector = MultiBucketCollector.wrap(collectors);
+ ((BucketCollector) collector).preCollection();
+ if (context.getProfilers() != null) {
+ collector = new InternalProfileCollector(
+ collector,
+ CollectorResult.REASON_AGGREGATION,
+ // TODO: report on child aggs as well
+ Collections.emptyList()
+ );
+ }
+ return collector;
}
}
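
The manager registered here is deliberately degenerate: newCollector() hands back the one pre-built aggregation collector, and reduce() throws, because bucket collectors share mutable state and cannot yet run one-per-segment-slice. For contrast, a manager that actually supported concurrent segment search would create slice-private collectors and merge them, roughly like this sketch (TotalHitCountCollector is Lucene's; ReduceableSearchResult is the interface this change set introduces, assumed to expose a single reduce(QuerySearchResult) method):

    // Contrast sketch (hypothetical): one collector per slice, merged in reduce().
    CollectorManager<TotalHitCountCollector, ReduceableSearchResult> concurrentSafe =
        new CollectorManager<TotalHitCountCollector, ReduceableSearchResult>() {
            @Override
            public TotalHitCountCollector newCollector() {
                return new TotalHitCountCollector(); // fresh, slice-private state
            }

            @Override
            public ReduceableSearchResult reduce(Collection<TotalHitCountCollector> collectors) {
                int total = 0;
                for (TotalHitCountCollector collector : collectors) {
                    total += collector.getTotalHits();
                }
                final int mergedCount = total; // effectively-final copy for the lambda below
                return result -> { /* a real implementation would record mergedCount on the QuerySearchResult */ };
            }
        };
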
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
index e632a13b95fb7..71320909ca5d2 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
@@ -55,6 +55,7 @@
import org.opensearch.common.xcontent.ToXContentFragment;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.index.IndexSettings;
import org.opensearch.search.DocValueFormat;
import java.io.IOException;
@@ -337,19 +338,16 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro
}
- private final RegExp include, exclude;
+ private final String include, exclude;
private final SortedSet<BytesRef> includeValues, excludeValues;
private final int incZeroBasedPartition;
private final int incNumPartitions;
/**
- * @param include The regular expression pattern for the terms to be included
- * @param exclude The regular expression pattern for the terms to be excluded
+ * @param include The string or regular expression pattern for the terms to be included
+ * @param exclude The string or regular expression pattern for the terms to be excluded
*/
- public IncludeExclude(RegExp include, RegExp exclude) {
- if (include == null && exclude == null) {
- throw new IllegalArgumentException();
- }
+ public IncludeExclude(String include, String exclude) {
this.include = include;
this.exclude = exclude;
this.includeValues = null;
@@ -358,10 +356,6 @@ public IncludeExclude(RegExp include, RegExp exclude) {
this.incNumPartitions = 0;
}
- public IncludeExclude(String include, String exclude) {
- this(include == null ? null : new RegExp(include), exclude == null ? null : new RegExp(exclude));
- }
-
/**
* @param includeValues The terms to be included
* @param excludeValues The terms to be excluded
@@ -412,10 +406,8 @@ public IncludeExclude(StreamInput in) throws IOException {
excludeValues = null;
incZeroBasedPartition = 0;
incNumPartitions = 0;
- String includeString = in.readOptionalString();
- include = includeString == null ? null : new RegExp(includeString);
- String excludeString = in.readOptionalString();
- exclude = excludeString == null ? null : new RegExp(excludeString);
+ include = in.readOptionalString();
+ exclude = in.readOptionalString();
return;
}
include = null;
@@ -447,8 +439,8 @@ public void writeTo(StreamOutput out) throws IOException {
boolean regexBased = isRegexBased();
out.writeBoolean(regexBased);
if (regexBased) {
- out.writeOptionalString(include == null ? null : include.getOriginalString());
- out.writeOptionalString(exclude == null ? null : exclude.getOriginalString());
+ out.writeOptionalString(include);
+ out.writeOptionalString(exclude);
} else {
boolean hasIncludes = includeValues != null;
out.writeBoolean(hasIncludes);
@@ -584,26 +576,46 @@ public boolean isPartitionBased() {
return incNumPartitions > 0;
}
- private Automaton toAutomaton() {
- Automaton a = null;
+ private Automaton toAutomaton(IndexSettings indexSettings) {
+ Automaton a;
if (include != null) {
- a = include.toAutomaton();
+ validateRegExpStringLength(include, indexSettings);
+ a = new RegExp(include).toAutomaton();
} else if (includeValues != null) {
a = Automata.makeStringUnion(includeValues);
} else {
a = Automata.makeAnyString();
}
if (exclude != null) {
- a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
+ validateRegExpStringLength(exclude, indexSettings);
+ Automaton excludeAutomaton = new RegExp(exclude).toAutomaton();
+ a = Operations.minus(a, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
} else if (excludeValues != null) {
a = Operations.minus(a, Automata.makeStringUnion(excludeValues), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
}
return a;
}
- public StringFilter convertToStringFilter(DocValueFormat format) {
+ private static void validateRegExpStringLength(String source, IndexSettings indexSettings) {
+ int maxRegexLength = indexSettings.getMaxRegexLength();
+ if (maxRegexLength > 0 && source.length() > maxRegexLength) {
+ throw new IllegalArgumentException(
+ "The length of regex ["
+ + source.length()
+ + "] used in the request has exceeded "
+ + "the allowed maximum of ["
+ + maxRegexLength
+ + "]. "
+ + "This maximum can be set by changing the ["
+ + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey()
+ + "] index level setting."
+ );
+ }
+ }
+
+ public StringFilter convertToStringFilter(DocValueFormat format, IndexSettings indexSettings) {
if (isRegexBased()) {
- return new AutomatonBackedStringFilter(toAutomaton());
+ return new AutomatonBackedStringFilter(toAutomaton(indexSettings));
}
if (isPartitionBased()) {
return new PartitionedStringFilter();
@@ -624,10 +636,10 @@ private static SortedSet<BytesRef> parseForDocValues(SortedSet<BytesRef> endUser
return result;
}
- public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) {
+ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, IndexSettings indexSettings) {
if (isRegexBased()) {
- return new AutomatonBackedOrdinalsFilter(toAutomaton());
+ return new AutomatonBackedOrdinalsFilter(toAutomaton(indexSettings));
}
if (isPartitionBased()) {
return new PartitionedOrdinalsFilter();
@@ -684,7 +696,7 @@ public LongFilter convertToDoubleFilter() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (include != null) {
- builder.field(INCLUDE_FIELD.getPreferredName(), include.getOriginalString());
+ builder.field(INCLUDE_FIELD.getPreferredName(), include);
} else if (includeValues != null) {
builder.startArray(INCLUDE_FIELD.getPreferredName());
for (BytesRef value : includeValues) {
@@ -698,7 +710,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.endObject();
}
if (exclude != null) {
- builder.field(EXCLUDE_FIELD.getPreferredName(), exclude.getOriginalString());
+ builder.field(EXCLUDE_FIELD.getPreferredName(), exclude);
} else if (excludeValues != null) {
builder.startArray(EXCLUDE_FIELD.getPreferredName());
for (BytesRef value : excludeValues) {
@@ -711,14 +723,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
@Override
public int hashCode() {
- return Objects.hash(
- include == null ? null : include.getOriginalString(),
- exclude == null ? null : exclude.getOriginalString(),
- includeValues,
- excludeValues,
- incZeroBasedPartition,
- incNumPartitions
- );
+ return Objects.hash(include, exclude, includeValues, excludeValues, incZeroBasedPartition, incNumPartitions);
}
@Override
@@ -730,14 +735,8 @@ public boolean equals(Object obj) {
return false;
}
IncludeExclude other = (IncludeExclude) obj;
- return Objects.equals(
- include == null ? null : include.getOriginalString(),
- other.include == null ? null : other.include.getOriginalString()
- )
- && Objects.equals(
- exclude == null ? null : exclude.getOriginalString(),
- other.exclude == null ? null : other.exclude.getOriginalString()
- )
+ return Objects.equals(include, other.include)
+ && Objects.equals(exclude, other.exclude)
&& Objects.equals(includeValues, other.includeValues)
&& Objects.equals(excludeValues, other.excludeValues)
&& Objects.equals(incZeroBasedPartition, other.incZeroBasedPartition)
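
IncludeExclude now keeps the raw pattern strings and compiles them only in toAutomaton(indexSettings), which is also where the new length guard runs; serialization, equality and hashing all operate on the plain strings. The practical effect is that an oversized pattern is rejected against the existing index.max_regex_length index setting when the aggregation builds its filter, rather than being compiled unconditionally at parse time. A worked illustration (the limit value is assumed):

    // Illustration, assuming the target index has index.max_regex_length set to 20:
    IncludeExclude includeExclude = new IncludeExclude("a{1,2}b{3,4}c{5,6}d{7,8}", null); // 24 chars, no RegExp yet
    // The RegExp is built, and validated, only here:
    // includeExclude.convertToStringFilter(DocValueFormat.RAW, indexSettings)
    // -> IllegalArgumentException: The length of regex [24] used in the request has exceeded
    //    the allowed maximum of [20]. This maximum can be set by changing the
    //    [index.max_regex_length] index level setting.
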
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java
index 0e03f87b070e1..c0a5c77a98170 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java
@@ -34,6 +34,7 @@
import org.opensearch.common.ParseField;
import org.opensearch.common.logging.DeprecationLogger;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.DocValueFormat;
import org.opensearch.search.aggregations.Aggregator;
@@ -250,7 +251,10 @@ Aggregator create(
double precision,
CardinalityUpperBound cardinality
) throws IOException {
- final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
+ IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings();
+ final IncludeExclude.StringFilter filter = includeExclude == null
+ ? null
+ : includeExclude.convertToStringFilter(format, indexSettings);
return new StringRareTermsAggregator(
name,
factories,
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java
index db6106d3ce9bc..4b93121ae06ef 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java
@@ -34,6 +34,7 @@
import org.opensearch.common.ParseField;
import org.opensearch.common.logging.DeprecationLogger;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.DocValueFormat;
@@ -325,8 +326,10 @@ Aggregator create(
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
-
- final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
+ IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings();
+ final IncludeExclude.StringFilter filter = includeExclude == null
+ ? null
+ : includeExclude.convertToStringFilter(format, indexSettings);
return new MapStringTermsAggregator(
name,
factories,
@@ -364,8 +367,10 @@ Aggregator create(
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
-
- final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
+ IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings();
+ final IncludeExclude.OrdinalsFilter filter = includeExclude == null
+ ? null
+ : includeExclude.convertToOrdinalsFilter(format, indexSettings);
boolean remapGlobalOrd = true;
if (cardinality == CardinalityUpperBound.ONE && factories == AggregatorFactories.EMPTY && includeExclude == null) {
/*
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java
index 85b4282e4c55b..992035f1fbe97 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java
@@ -44,6 +44,7 @@
import org.opensearch.common.util.BigArrays;
import org.opensearch.common.util.BytesRefHash;
import org.opensearch.common.util.ObjectArray;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryShardContext;
@@ -137,7 +138,10 @@ protected Aggregator createInternal(
// TODO - need to check with mapping that this is indeed a text field....
- IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null : includeExclude.convertToStringFilter(DocValueFormat.RAW);
+ IndexSettings indexSettings = searchContext.getQueryShardContext().getIndexSettings();
+ IncludeExclude.StringFilter incExcFilter = includeExclude == null
+ ? null
+ : includeExclude.convertToStringFilter(DocValueFormat.RAW, indexSettings);
MapStringTermsAggregator.CollectorSource collectorSource = new SignificantTextCollectorSource(
queryShardContext.lookup().source(),
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
index d2272d0a63042..17b412f87107c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
@@ -34,6 +34,7 @@
import org.apache.lucene.search.IndexSearcher;
import org.opensearch.common.ParseField;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.DocValueFormat;
import org.opensearch.search.aggregations.AggregationExecutionException;
@@ -380,7 +381,10 @@ Aggregator create(
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
- final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
+ IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings();
+ final IncludeExclude.StringFilter filter = includeExclude == null
+ ? null
+ : includeExclude.convertToStringFilter(format, indexSettings);
return new MapStringTermsAggregator(
name,
factories,
@@ -458,7 +462,10 @@ Aggregator create(
);
}
- final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
+ IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings();
+ final IncludeExclude.OrdinalsFilter filter = includeExclude == null
+ ? null
+ : includeExclude.convertToOrdinalsFilter(format, indexSettings);
boolean remapGlobalOrds;
if (cardinality == CardinalityUpperBound.ONE && REMAP_GLOBAL_ORDS != null) {
/*
diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
index 2cc15d4c65b96..2fb5ababe19ad 100644
--- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
@@ -96,16 +96,6 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
private QueryProfiler profiler;
private MutableQueryTimeout cancellable;
- public ContextIndexSearcher(
- IndexReader reader,
- Similarity similarity,
- QueryCache queryCache,
- QueryCachingPolicy queryCachingPolicy,
- boolean wrapWithExitableDirectoryReader
- ) throws IOException {
- this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout(), wrapWithExitableDirectoryReader, null);
- }
-
public ContextIndexSearcher(
IndexReader reader,
Similarity similarity,
@@ -233,6 +223,25 @@ public void search(
result.topDocs(new TopDocsAndMaxScore(mergedTopDocs, Float.NaN), formats);
}
+ public void search(
+ Query query,
+ CollectorManager<?, TopFieldDocs> manager,
+ QuerySearchResult result,
+ DocValueFormat[] formats,
+ TotalHits totalHits
+ ) throws IOException {
+ TopFieldDocs mergedTopDocs = search(query, manager);
+ // Lucene sets shards indexes during merging of topDocs from different collectors
+ // We need to reset shard index; OpenSearch will set shard index later during reduce stage
+ for (ScoreDoc scoreDoc : mergedTopDocs.scoreDocs) {
+ scoreDoc.shardIndex = -1;
+ }
+ if (totalHits != null) { // we have already precalculated totalHits for the whole index
+ mergedTopDocs = new TopFieldDocs(totalHits, mergedTopDocs.scoreDocs, mergedTopDocs.fields);
+ }
+ result.topDocs(new TopDocsAndMaxScore(mergedTopDocs, Float.NaN), formats);
+ }
+
@Override
protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
for (LeafReaderContext ctx : leaves) { // search each subreader
@@ -420,8 +429,4 @@ public void clear() {
runnables.clear();
}
}
-
- public boolean allowConcurrentSegmentSearch() {
- return (getExecutor() != null);
- }
}
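
The new search overload above exists because Lucene's CollectorManager merge path stamps every ScoreDoc with a collector-local shard index, while OpenSearch only assigns the real shard index later, during reduce; clearing the field keeps the two numbering schemes from colliding. The totalHits argument covers callers that already computed an authoritative hit count for the whole index, which then replaces the count merged from the per-slice collectors. Restated as a compact sketch of the invariant:

    TopFieldDocs merged = search(query, manager);   // TopDocs.merge(...) set scoreDoc.shardIndex per collector
    for (ScoreDoc scoreDoc : merged.scoreDocs) {
        scoreDoc.shardIndex = -1;                   // -1 means "unset"; the reduce phase fills in the real value
    }
    if (totalHits != null) {
        merged = new TopFieldDocs(totalHits, merged.scoreDocs, merged.fields); // precomputed count wins
    }
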
diff --git a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
index 6d77558ec3bd0..961d45b0011ef 100644
--- a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
+++ b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
@@ -33,6 +33,7 @@
package org.opensearch.search.internal;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.opensearch.action.search.SearchShardTask;
@@ -61,6 +62,7 @@
import org.opensearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.opensearch.search.profile.Profilers;
import org.opensearch.search.query.QuerySearchResult;
+import org.opensearch.search.query.ReduceableSearchResult;
import org.opensearch.search.rescore.RescoreContext;
import org.opensearch.search.sort.SortAndFormats;
import org.opensearch.search.suggest.SuggestionSearchContext;
@@ -492,8 +494,8 @@ public Profilers getProfilers() {
}
@Override
- public Map<Class<?>, Collector> queryCollectors() {
- return in.queryCollectors();
+ public Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers() {
+ return in.queryCollectorManagers();
}
@Override
diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
index 7ff0eaed4be63..0c24fbee76335 100644
--- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java
+++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
@@ -32,6 +32,7 @@
package org.opensearch.search.internal;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.opensearch.action.search.SearchShardTask;
@@ -66,6 +67,7 @@
import org.opensearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.opensearch.search.profile.Profilers;
import org.opensearch.search.query.QuerySearchResult;
+import org.opensearch.search.query.ReduceableSearchResult;
import org.opensearch.search.rescore.RescoreContext;
import org.opensearch.search.sort.SortAndFormats;
import org.opensearch.search.suggest.SuggestionSearchContext;
@@ -388,8 +390,8 @@ public final boolean hasOnlySuggest() {
*/
public abstract long getRelativeTimeInMillis();
- /** Return a view of the additional query collectors that should be run for this context. */
- public abstract Map<Class<?>, Collector> queryCollectors();
+ /** Return a view of the additional query collector managers that should be run for this context. */
+ public abstract Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers();
public abstract QueryShardContext getQueryShardContext();
diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
index 82daa94d92146..716476101ac48 100644
--- a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
+++ b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
@@ -33,7 +33,6 @@
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.ExceptionsHelper;
-import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.fielddata.ScriptDocValues;
import org.opensearch.index.mapper.MappedFieldType;
@@ -50,10 +49,6 @@
public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {
- private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(LeafDocLookup.class);
- static final String TYPES_DEPRECATION_KEY = "type-field-doc-lookup";
- static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Looking up doc types [_type] in scripts is deprecated.";
-
private final Map<String, ScriptDocValues<?>> localCacheFieldData = new HashMap<>(4);
private final MapperService mapperService;
@@ -78,10 +73,6 @@ public void setDocument(int docId) {
@Override
public ScriptDocValues<?> get(Object key) {
- // deprecate _type
- if ("_type".equals(key)) {
- DEPRECATION_LOGGER.deprecate(TYPES_DEPRECATION_KEY, TYPES_DEPRECATION_MESSAGE);
- }
// assume its a string...
String fieldName = key.toString();
ScriptDocValues<?> scriptValues = localCacheFieldData.get(fieldName);
diff --git a/server/src/main/java/org/opensearch/search/profile/Profilers.java b/server/src/main/java/org/opensearch/search/profile/Profilers.java
index 6b9be0167b50f..3cc9b1710d420 100644
--- a/server/src/main/java/org/opensearch/search/profile/Profilers.java
+++ b/server/src/main/java/org/opensearch/search/profile/Profilers.java
@@ -57,7 +57,7 @@ public Profilers(ContextIndexSearcher searcher) {
/** Switch to a new profile. */
public QueryProfiler addQueryProfiler() {
- QueryProfiler profiler = new QueryProfiler(searcher.allowConcurrentSegmentSearch());
+ QueryProfiler profiler = new QueryProfiler(searcher.getExecutor() != null);
searcher.setProfiler(profiler);
queryProfilers.add(profiler);
return profiler;
diff --git a/server/src/main/java/org/opensearch/search/profile/query/InternalProfileCollectorManager.java b/server/src/main/java/org/opensearch/search/profile/query/InternalProfileCollectorManager.java
new file mode 100644
index 0000000000000..a35c22a6a2457
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/query/InternalProfileCollectorManager.java
@@ -0,0 +1,89 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+import org.opensearch.search.query.EarlyTerminatingListener;
+import org.opensearch.search.query.ReduceableSearchResult;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
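+/**
+ * A {@link ProfileCollectorManager} that wraps another collector manager: each collector it
+ * creates is wrapped into an {@link InternalProfileCollector}, and reduce() unwraps the profiled
+ * collectors again, accumulating the total collection time across slices before delegating to the
+ * wrapped manager.
+ */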
+public class InternalProfileCollectorManager
+ implements
+ ProfileCollectorManager<InternalProfileCollector, ReduceableSearchResult>,
+ EarlyTerminatingListener {
+ private final CollectorManager<? extends Collector, ReduceableSearchResult> manager;
+ private final String reason;
+ private final List<InternalProfileComponent> children;
+ private long time = 0;
+
+ public InternalProfileCollectorManager(
+ CollectorManager<? extends Collector, ReduceableSearchResult> manager,
+ String reason,
+ List<InternalProfileComponent> children
+ ) {
+ this.manager = manager;
+ this.reason = reason;
+ this.children = children;
+ }
+
+ @Override
+ public InternalProfileCollector newCollector() throws IOException {
+ return new InternalProfileCollector(manager.newCollector(), reason, children);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public ReduceableSearchResult reduce(Collection<InternalProfileCollector> collectors) throws IOException {
+ final Collection<Collector> subs = new ArrayList<>();
+
+ for (final InternalProfileCollector collector : collectors) {
+ subs.add(collector.getCollector());
+ time += collector.getTime();
+ }
+
+ return ((CollectorManager<Collector, ReduceableSearchResult>) manager).reduce(subs);
+ }
+
+ @Override
+ public String getReason() {
+ return reason;
+ }
+
+ @Override
+ public long getTime() {
+ return time;
+ }
+
+ @Override
+ public Collection<? extends InternalProfileComponent> children() {
+ return children;
+ }
+
+ @Override
+ public String getName() {
+ return manager.getClass().getSimpleName();
+ }
+
+ @Override
+ public CollectorResult getCollectorTree() {
+ return InternalProfileCollector.doGetCollectorTree(this);
+ }
+
+ @Override
+ public void onEarlyTermination(int maxCountHits, boolean forcedTermination) {
+ if (manager instanceof EarlyTerminatingListener) {
+ ((EarlyTerminatingListener) manager).onEarlyTermination(maxCountHits, forcedTermination);
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileCollectorManager.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileCollectorManager.java
new file mode 100644
index 0000000000000..7037988401fce
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileCollectorManager.java
@@ -0,0 +1,17 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+
+/**
+ * Collector manager which supports profiling
+ */
+public interface ProfileCollectorManager<C extends Collector, T> extends CollectorManager<C, T>, InternalProfileComponent {}
diff --git a/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollector.java b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollector.java
index 3ee8430522891..56cb49835dcc4 100644
--- a/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollector.java
+++ b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollector.java
@@ -95,6 +95,10 @@ public void collect(int doc) throws IOException {
};
}
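+ /** Returns the collector wrapped by this early terminating collector. */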
+ Collector getCollector() {
+ return in;
+ }
+
/**
* Returns true if this collector has early terminated.
*/
diff --git a/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollectorManager.java b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollectorManager.java
new file mode 100644
index 0000000000000..32fbb24d16436
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingCollectorManager.java
@@ -0,0 +1,74 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
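+/**
+ * A {@link CollectorManager} for early termination: every collector it creates stops collecting
+ * after {@code maxCountHits} documents, and if any collector terminated early the reduced
+ * {@link ReduceableSearchResult} marks the query result as terminated early.
+ */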
+public class EarlyTerminatingCollectorManager<C extends Collector>
+ implements
+ CollectorManager<EarlyTerminatingCollector, ReduceableSearchResult>,
+ EarlyTerminatingListener {
+
+ private final CollectorManager<C, ReduceableSearchResult> manager;
+ private final int maxCountHits;
+ private boolean forceTermination;
+
+ EarlyTerminatingCollectorManager(CollectorManager<C, ReduceableSearchResult> manager, int maxCountHits, boolean forceTermination) {
+ this.manager = manager;
+ this.maxCountHits = maxCountHits;
+ this.forceTermination = forceTermination;
+ }
+
+ @Override
+ public EarlyTerminatingCollector newCollector() throws IOException {
+ return new EarlyTerminatingCollector(manager.newCollector(), maxCountHits, false /* forced termination is not supported */);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public ReduceableSearchResult reduce(Collection<EarlyTerminatingCollector> collectors) throws IOException {
+ final List<C> innerCollectors = new ArrayList<>(collectors.size());
+
+ boolean didTerminateEarly = false;
+ for (EarlyTerminatingCollector collector : collectors) {
+ innerCollectors.add((C) collector.getCollector());
+ if (collector.hasEarlyTerminated()) {
+ didTerminateEarly = true;
+ }
+ }
+
+ if (didTerminateEarly) {
+ onEarlyTermination(maxCountHits, forceTermination);
+
+ final ReduceableSearchResult result = manager.reduce(innerCollectors);
+ return new ReduceableSearchResult() {
+ @Override
+ public void reduce(QuerySearchResult r) throws IOException {
+ result.reduce(r);
+ r.terminatedEarly(true);
+ }
+ };
+ }
+
+ return manager.reduce(innerCollectors);
+ }
+
+ @Override
+ public void onEarlyTermination(int maxCountHits, boolean forcedTermination) {
+ if (manager instanceof EarlyTerminatingListener) {
+ ((EarlyTerminatingListener) manager).onEarlyTermination(maxCountHits, forcedTermination);
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/query/EarlyTerminatingListener.java b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingListener.java
new file mode 100644
index 0000000000000..dd6793266a7ca
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/EarlyTerminatingListener.java
@@ -0,0 +1,22 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+/**
+ * Early termination event listener. It is used during concurrent segment search
+ * to propagate the early termination intent.
+ */
+public interface EarlyTerminatingListener {
+ /**
+ * Early termination event notification
+ * @param maxCountHits desired maximum number of hits
+ * @param forcedTermination "true" if forced termination has been requested, "false" otherwise
+ */
+ void onEarlyTermination(int maxCountHits, boolean forcedTermination);
+}
diff --git a/server/src/main/java/org/opensearch/search/query/FilteredCollectorManager.java b/server/src/main/java/org/opensearch/search/query/FilteredCollectorManager.java
new file mode 100644
index 0000000000000..ef47cf2a388f3
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/FilteredCollectorManager.java
@@ -0,0 +1,45 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+import org.apache.lucene.search.Weight;
+import org.opensearch.common.lucene.search.FilteredCollector;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+
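+/**
+ * A {@link CollectorManager} that wraps the collectors of another manager into
+ * {@link FilteredCollector}s so that only documents matching the filter weight are collected.
+ */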
+class FilteredCollectorManager implements CollectorManager<FilteredCollector, ReduceableSearchResult> {
+ private final CollectorManager<? extends Collector, ReduceableSearchResult> manager;
+ private final Weight filter;
+
+ FilteredCollectorManager(CollectorManager<? extends Collector, ReduceableSearchResult> manager, Weight filter) {
+ this.manager = manager;
+ this.filter = filter;
+ }
+
+ @Override
+ public FilteredCollector newCollector() throws IOException {
+ return new FilteredCollector(manager.newCollector(), filter);
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public ReduceableSearchResult reduce(Collection<FilteredCollector> collectors) throws IOException {
+ final Collection<Collector> subCollectors = new ArrayList<>();
+
+ for (final FilteredCollector collector : collectors) {
+ subCollectors.add(collector.getCollector());
+ }
+
+ return ((CollectorManager<Collector, ReduceableSearchResult>) manager).reduce(subCollectors);
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/query/MinimumCollectorManager.java b/server/src/main/java/org/opensearch/search/query/MinimumCollectorManager.java
new file mode 100644
index 0000000000000..22b25222b639d
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/MinimumCollectorManager.java
@@ -0,0 +1,44 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+import org.opensearch.common.lucene.MinimumScoreCollector;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+
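+/**
+ * A {@link CollectorManager} that wraps the collectors of another manager into
+ * {@link MinimumScoreCollector}s so that only documents scoring at least the minimum score are
+ * collected.
+ */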
+class MinimumCollectorManager implements CollectorManager<MinimumScoreCollector, ReduceableSearchResult> {
+ private final CollectorManager<? extends Collector, ReduceableSearchResult> manager;
+ private final float minimumScore;
+
+ MinimumCollectorManager(CollectorManager<? extends Collector, ReduceableSearchResult> manager, float minimumScore) {
+ this.manager = manager;
+ this.minimumScore = minimumScore;
+ }
+
+ @Override
+ public MinimumScoreCollector newCollector() throws IOException {
+ return new MinimumScoreCollector(manager.newCollector(), minimumScore);
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public ReduceableSearchResult reduce(Collection<MinimumScoreCollector> collectors) throws IOException {
+ final Collection<Collector> subCollectors = new ArrayList<>();
+
+ for (final MinimumScoreCollector collector : collectors) {
+ subCollectors.add(collector.getCollector());
+ }
+
+ return ((CollectorManager<Collector, ReduceableSearchResult>) manager).reduce(subCollectors);
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/query/MultiCollectorWrapper.java b/server/src/main/java/org/opensearch/search/query/MultiCollectorWrapper.java
new file mode 100644
index 0000000000000..0ee423b48caeb
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/MultiCollectorWrapper.java
@@ -0,0 +1,58 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.ScoreMode;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Wraps MultiCollector and provides access to the underlying collectors.
+ * Please check out https://github.com/apache/lucene/pull/455.
+ */
+public class MultiCollectorWrapper implements Collector {
+ private final MultiCollector delegate;
+ private final Collection<Collector> collectors;
+
+ MultiCollectorWrapper(MultiCollector delegate, Collection<Collector> collectors) {
+ this.delegate = delegate;
+ this.collectors = collectors;
+ }
+
+ @Override
+ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+ return delegate.getLeafCollector(context);
+ }
+
+ @Override
+ public ScoreMode scoreMode() {
+ return delegate.scoreMode();
+ }
+
+ public Collection<Collector> getCollectors() {
+ return collectors;
+ }
+
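+ /**
+ * Wraps the given collectors; {@link MultiCollector#wrap} filters out null collectors and, if
+ * only a single collector remains, returns it directly, in which case no wrapper is created.
+ */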
+ public static Collector wrap(Collector... collectors) {
+ final List<Collector> collectorsList = Arrays.asList(collectors);
+ final Collector collector = MultiCollector.wrap(collectorsList);
+ if (collector instanceof MultiCollector) {
+ return new MultiCollectorWrapper((MultiCollector) collector, collectorsList);
+ } else {
+ return collector;
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/query/QueryCollectorContext.java b/server/src/main/java/org/opensearch/search/query/QueryCollectorContext.java
index d1ff855888f0b..95ad514adf97d 100644
--- a/server/src/main/java/org/opensearch/search/query/QueryCollectorContext.java
+++ b/server/src/main/java/org/opensearch/search/query/QueryCollectorContext.java
@@ -33,6 +33,7 @@
package org.opensearch.search.query;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
@@ -42,6 +43,7 @@
import org.opensearch.common.lucene.MinimumScoreCollector;
import org.opensearch.common.lucene.search.FilteredCollector;
import org.opensearch.search.profile.query.InternalProfileCollector;
+import org.opensearch.search.profile.query.InternalProfileCollectorManager;
import java.io.IOException;
import java.util.ArrayList;
@@ -54,7 +56,7 @@
import static org.opensearch.search.profile.query.CollectorResult.REASON_SEARCH_POST_FILTER;
import static org.opensearch.search.profile.query.CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT;
-abstract class QueryCollectorContext {
+public abstract class QueryCollectorContext {
private static final Collector EMPTY_COLLECTOR = new SimpleCollector() {
@Override
public void collect(int doc) {}
@@ -77,6 +79,8 @@ public ScoreMode scoreMode() {
*/
abstract Collector create(Collector in) throws IOException;
+ abstract CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException;
+
/**
* Wraps this collector with a profiler
*/
@@ -85,6 +89,18 @@ protected InternalProfileCollector createWithProfiler(InternalProfileCollector i
return new InternalProfileCollector(collector, profilerName, in != null ? Collections.singletonList(in) : Collections.emptyList());
}
+ /**
+ * Wraps this collector manager with a profiler
+ */
+ protected InternalProfileCollectorManager createWithProfiler(InternalProfileCollectorManager in) throws IOException {
+ final CollectorManager<? extends Collector, ReduceableSearchResult> manager = createManager(in);
+ return new InternalProfileCollectorManager(
+ manager,
+ profilerName,
+ in != null ? Collections.singletonList(in) : Collections.emptyList()
+ );
+ }
+
/**
* Post-process result after search execution.
*
@@ -126,6 +142,11 @@ static QueryCollectorContext createMinScoreCollectorContext(float minScore) {
Collector create(Collector in) {
return new MinimumScoreCollector(in, minScore);
}
+
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException {
+ return new MinimumCollectorManager(in, minScore);
+ }
};
}
@@ -139,35 +160,58 @@ Collector create(Collector in) throws IOException {
final Weight filterWeight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
return new FilteredCollector(in, filterWeight);
}
+
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException {
+ final Weight filterWeight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
+ return new FilteredCollectorManager(in, filterWeight);
+ }
};
}
/**
- * Creates a multi collector from the provided subs
+ * Creates a multi collector manager from the provided subs
*/
- static QueryCollectorContext createMultiCollectorContext(Collection<Collector> subs) {
+ static QueryCollectorContext createMultiCollectorContext(
+ Collection<CollectorManager<? extends Collector, ReduceableSearchResult>> subs
+ ) {
return new QueryCollectorContext(REASON_SEARCH_MULTI) {
@Override
- Collector create(Collector in) {
+ Collector create(Collector in) throws IOException {
List<Collector> subCollectors = new ArrayList<>();
subCollectors.add(in);
- subCollectors.addAll(subs);
+ for (CollectorManager<? extends Collector, ReduceableSearchResult> manager : subs) {
+ subCollectors.add(manager.newCollector());
+ }
return MultiCollector.wrap(subCollectors);
}
@Override
- protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) {
+ protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) throws IOException {
final List<InternalProfileCollector> subCollectors = new ArrayList<>();
subCollectors.add(in);
- if (subs.stream().anyMatch((col) -> col instanceof InternalProfileCollector == false)) {
- throw new IllegalArgumentException("non-profiling collector");
- }
- for (Collector collector : subs) {
+
+ for (CollectorManager<? extends Collector, ReduceableSearchResult> manager : subs) {
+ final Collector collector = manager.newCollector();
+ if (!(collector instanceof InternalProfileCollector)) {
+ throw new IllegalArgumentException("non-profiling collector");
+ }
subCollectors.add((InternalProfileCollector) collector);
}
+
final Collector collector = MultiCollector.wrap(subCollectors);
return new InternalProfileCollector(collector, REASON_SEARCH_MULTI, subCollectors);
}
+
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(
+ CollectorManager<? extends Collector, ReduceableSearchResult> in
+ ) throws IOException {
+ final List<CollectorManager<? extends Collector, ReduceableSearchResult>> managers = new ArrayList<>();
+ managers.add(in);
+ managers.addAll(subs);
+ return QueryCollectorManagerContext.createOpaqueCollectorManager(managers);
+ }
};
}
@@ -192,6 +236,13 @@ Collector create(Collector in) {
this.collector = MultiCollector.wrap(subCollectors);
return collector;
}
+
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(
+ CollectorManager<? extends Collector, ReduceableSearchResult> in
+ ) throws IOException {
+ return new EarlyTerminatingCollectorManager<>(in, numHits, true);
+ }
};
}
}
diff --git a/server/src/main/java/org/opensearch/search/query/QueryCollectorManagerContext.java b/server/src/main/java/org/opensearch/search/query/QueryCollectorManagerContext.java
new file mode 100644
index 0000000000000..c98f4884bb030
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/QueryCollectorManagerContext.java
@@ -0,0 +1,99 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.opensearch.search.profile.query.InternalProfileCollectorManager;
+import org.opensearch.search.profile.query.ProfileCollectorManager;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
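+/**
+ * Builds {@link CollectorManager} chains out of {@link QueryCollectorContext}s, mirroring the
+ * collector chain assembled by {@code QueryCollectorContext#create(Collector)} for the
+ * sequential (single threaded) search path.
+ */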
+public abstract class QueryCollectorManagerContext {
+ private static class QueryCollectorManager implements CollectorManager<Collector, ReduceableSearchResult> {
+ private final MultiCollectorManager manager;
+
+ private QueryCollectorManager(Collection<CollectorManager<? extends Collector, ReduceableSearchResult>> managers) {
+ this.manager = new MultiCollectorManager(managers.toArray(new CollectorManager<?, ?>[0]));
+ }
+
+ @Override
+ public Collector newCollector() throws IOException {
+ return manager.newCollector();
+ }
+
+ @Override
+ public ReduceableSearchResult reduce(Collection<Collector> collectors) throws IOException {
+ final Object[] results = manager.reduce(collectors);
+
+ final ReduceableSearchResult[] transformed = new ReduceableSearchResult[results.length];
+ for (int i = 0; i < results.length; ++i) {
+ assert results[i] instanceof ReduceableSearchResult;
+ transformed[i] = (ReduceableSearchResult) results[i];
+ }
+
+ return reduceWith(transformed);
+ }
+
+ protected ReduceableSearchResult reduceWith(final ReduceableSearchResult[] results) {
+ return (QuerySearchResult result) -> {
+ for (final ReduceableSearchResult r : results) {
+ r.reduce(result);
+ }
+ };
+ }
+ }
+
+ private static class OpaqueQueryCollectorManager extends QueryCollectorManager {
+ private OpaqueQueryCollectorManager(Collection<CollectorManager<? extends Collector, ReduceableSearchResult>> managers) {
+ super(managers);
+ }
+
+ @Override
+ protected ReduceableSearchResult reduceWith(final ReduceableSearchResult[] results) {
+ return (QuerySearchResult result) -> {};
+ }
+ }
+
+ public static CollectorManager<? extends Collector, ReduceableSearchResult> createOpaqueCollectorManager(
+ List<CollectorManager<? extends Collector, ReduceableSearchResult>> managers
+ ) throws IOException {
+ return new OpaqueQueryCollectorManager(managers);
+ }
+
+ public static CollectorManager<? extends Collector, ReduceableSearchResult> createMultiCollectorManager(
+ List<QueryCollectorContext> collectors
+ ) throws IOException {
+ final Collection<CollectorManager<? extends Collector, ReduceableSearchResult>> managers = new ArrayList<>();
+
+ CollectorManager<? extends Collector, ReduceableSearchResult> manager = null;
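+ // Chain the contexts: each context wraps the manager produced by the previous one, mirroring
+ // the way QueryCollectorContext.create(Collector) chains collectors in the sequential path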
+ for (QueryCollectorContext ctx : collectors) {
+ manager = ctx.createManager(manager);
+ managers.add(manager);
+ }
+
+ return new QueryCollectorManager(managers);
+ }
+
+ public static ProfileCollectorManager<? extends Collector, ReduceableSearchResult> createQueryCollectorManagerWithProfiler(
+ List<QueryCollectorContext> collectors
+ ) throws IOException {
+ InternalProfileCollectorManager manager = null;
+
+ for (QueryCollectorContext ctx : collectors) {
+ manager = ctx.createWithProfiler(manager);
+ }
+
+ return manager;
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhase.java b/server/src/main/java/org/opensearch/search/query/QueryPhase.java
index 3edbc16cd613f..1501067ec7983 100644
--- a/server/src/main/java/org/opensearch/search/query/QueryPhase.java
+++ b/server/src/main/java/org/opensearch/search/query/QueryPhase.java
@@ -238,9 +238,9 @@ static boolean executeInternal(SearchContext searchContext, QueryPhaseSearcher q
// this collector can filter documents during the collection
hasFilterCollector = true;
}
- if (searchContext.queryCollectors().isEmpty() == false) {
+ if (searchContext.queryCollectorManagers().isEmpty() == false) {
// plug in additional collectors, like aggregations
- collectors.add(createMultiCollectorContext(searchContext.queryCollectors().values()));
+ collectors.add(createMultiCollectorContext(searchContext.queryCollectorManagers().values()));
}
if (searchContext.minimumScore() != null) {
// apply the minimum score after multi collector so we filter aggs as well
diff --git a/server/src/main/java/org/opensearch/search/query/ReduceableSearchResult.java b/server/src/main/java/org/opensearch/search/query/ReduceableSearchResult.java
new file mode 100644
index 0000000000000..48e8d7198ea3b
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/ReduceableSearchResult.java
@@ -0,0 +1,23 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import java.io.IOException;
+
+/**
+ * The search result callback returned by reduce phase of the collector manager.
+ */
+public interface ReduceableSearchResult {
+ /**
+ * Apply the reduce operation to the query search results
+ * @param result query search results
+ * @throws IOException exception if reduce operation failed
+ */
+ void reduce(QuerySearchResult result) throws IOException;
+}
diff --git a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java
index 9cf7dca3c4caf..5f19462a2c33a 100644
--- a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java
+++ b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java
@@ -44,6 +44,7 @@
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.FieldDoc;
@@ -80,6 +81,9 @@
import org.opensearch.search.sort.SortAndFormats;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Objects;
import java.util.function.Supplier;
@@ -89,7 +93,7 @@
/**
* A {@link QueryCollectorContext} that creates top docs collector
*/
-abstract class TopDocsCollectorContext extends QueryCollectorContext {
+public abstract class TopDocsCollectorContext extends QueryCollectorContext {
protected final int numHits;
TopDocsCollectorContext(String profilerName, int numHits) {
@@ -107,7 +111,7 @@ final int numHits() {
/**
* Returns true if the top docs should be re-scored after initial search
*/
- boolean shouldRescore() {
+ public boolean shouldRescore() {
return false;
}
@@ -115,6 +119,8 @@ static class EmptyTopDocsCollectorContext extends TopDocsCollectorContext {
private final Sort sort;
private final Collector collector;
private final Supplier<TotalHits> hitCountSupplier;
+ private final int trackTotalHitsUpTo;
+ private final int hitCount;
/**
* Ctr
@@ -132,16 +138,18 @@ private EmptyTopDocsCollectorContext(
) throws IOException {
super(REASON_SEARCH_COUNT, 0);
this.sort = sortAndFormats == null ? null : sortAndFormats.sort;
- if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
+ this.trackTotalHitsUpTo = trackTotalHitsUpTo;
+ if (this.trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
this.collector = new EarlyTerminatingCollector(new TotalHitCountCollector(), 0, false);
// for bwc hit count is set to 0, it will be converted to -1 by the coordinating node
this.hitCountSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+ this.hitCount = Integer.MIN_VALUE;
} else {
TotalHitCountCollector hitCountCollector = new TotalHitCountCollector();
// implicit total hit counts are valid only when there is no filter collector in the chain
- int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
- if (hitCount == -1) {
- if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_ACCURATE) {
+ this.hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
+ if (this.hitCount == -1) {
+ if (this.trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_ACCURATE) {
this.collector = hitCountCollector;
this.hitCountSupplier = () -> new TotalHits(hitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO);
} else {
@@ -159,6 +167,39 @@ private EmptyTopDocsCollectorContext(
}
}
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException {
+ assert in == null;
+
+ CollectorManager<? extends Collector, ReduceableSearchResult> manager = null;
+
+ if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
+ manager = new EarlyTerminatingCollectorManager<>(
+ new TotalHitCountCollectorManager.Empty(new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), sort),
+ 0,
+ false
+ );
+ } else {
+ if (hitCount == -1) {
+ if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_ACCURATE) {
+ manager = new EarlyTerminatingCollectorManager<>(
+ new TotalHitCountCollectorManager(sort),
+ trackTotalHitsUpTo,
+ false
+ );
+ }
+ } else {
+ manager = new EarlyTerminatingCollectorManager<>(
+ new TotalHitCountCollectorManager.Empty(new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO), sort),
+ 0,
+ false
+ );
+ }
+ }
+
+ return manager;
+ }
+
@Override
Collector create(Collector in) {
assert in == null;
@@ -181,7 +222,11 @@ void postProcess(QuerySearchResult result) {
static class CollapsingTopDocsCollectorContext extends TopDocsCollectorContext {
private final DocValueFormat[] sortFmt;
private final CollapsingTopDocsCollector<?> topDocsCollector;
+ private final Collector collector;
private final Supplier<Float> maxScoreSupplier;
+ private final CollapseContext collapseContext;
+ private final boolean trackMaxScore;
+ private final Sort sort;
/**
* Ctr
@@ -199,30 +244,94 @@ private CollapsingTopDocsCollectorContext(
super(REASON_SEARCH_TOP_HITS, numHits);
assert numHits > 0;
assert collapseContext != null;
- Sort sort = sortAndFormats == null ? Sort.RELEVANCE : sortAndFormats.sort;
+ this.sort = sortAndFormats == null ? Sort.RELEVANCE : sortAndFormats.sort;
this.sortFmt = sortAndFormats == null ? new DocValueFormat[] { DocValueFormat.RAW } : sortAndFormats.formats;
+ this.collapseContext = collapseContext;
this.topDocsCollector = collapseContext.createTopDocs(sort, numHits);
+ this.trackMaxScore = trackMaxScore;
- MaxScoreCollector maxScoreCollector;
+ MaxScoreCollector maxScoreCollector = null;
if (trackMaxScore) {
maxScoreCollector = new MaxScoreCollector();
maxScoreSupplier = maxScoreCollector::getMaxScore;
} else {
+ maxScoreCollector = null;
maxScoreSupplier = () -> Float.NaN;
}
+
+ this.collector = MultiCollector.wrap(topDocsCollector, maxScoreCollector);
}
@Override
Collector create(Collector in) throws IOException {
assert in == null;
- return topDocsCollector;
+ return collector;
}
@Override
void postProcess(QuerySearchResult result) throws IOException {
- CollapseTopFieldDocs topDocs = topDocsCollector.getTopDocs();
+ final CollapseTopFieldDocs topDocs = topDocsCollector.getTopDocs();
result.topDocs(new TopDocsAndMaxScore(topDocs, maxScoreSupplier.get()), sortFmt);
}
+
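+ // The manager below creates a fresh collapsing top docs collector (plus an optional max score
+ // collector) per slice; reduceWith merges the per-slice CollapseTopFieldDocs into one result.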
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException {
+ return new CollectorManager<Collector, ReduceableSearchResult>() {
+ @Override
+ public Collector newCollector() throws IOException {
+ MaxScoreCollector maxScoreCollector = null;
+
+ if (trackMaxScore) {
+ maxScoreCollector = new MaxScoreCollector();
+ }
+
+ return MultiCollectorWrapper.wrap(collapseContext.createTopDocs(sort, numHits), maxScoreCollector);
+ }
+
+ @Override
+ public ReduceableSearchResult reduce(Collection<Collector> collectors) throws IOException {
+ final Collection<Collector> subs = new ArrayList<>();
+ for (final Collector collector : collectors) {
+ if (collector instanceof MultiCollectorWrapper) {
+ subs.addAll(((MultiCollectorWrapper) collector).getCollectors());
+ } else {
+ subs.add(collector);
+ }
+ }
+
+ final Collection<CollapseTopFieldDocs> topFieldDocs = new ArrayList<>();
+ float maxScore = Float.NaN;
+
+ for (final Collector collector : subs) {
+ if (collector instanceof CollapsingTopDocsCollector<?>) {
+ topFieldDocs.add(((CollapsingTopDocsCollector<?>) collector).getTopDocs());
+ } else if (collector instanceof MaxScoreCollector) {
+ float score = ((MaxScoreCollector) collector).getMaxScore();
+ if (Float.isNaN(maxScore)) {
+ maxScore = score;
+ } else {
+ maxScore = Math.max(maxScore, score);
+ }
+ }
+ }
+
+ return reduceWith(topFieldDocs, maxScore);
+ }
+ };
+ }
+
+ protected ReduceableSearchResult reduceWith(final Collection<CollapseTopFieldDocs> topFieldDocs, float maxScore) {
+ return (QuerySearchResult result) -> {
+ final CollapseTopFieldDocs topDocs = CollapseTopFieldDocs.merge(
+ sort,
+ 0,
+ numHits,
+ topFieldDocs.toArray(new CollapseTopFieldDocs[0]),
+ true
+ );
+ result.topDocs(new TopDocsAndMaxScore(topDocs, maxScore), sortFmt);
+ };
+ }
}
abstract static class SimpleTopDocsCollectorContext extends TopDocsCollectorContext {
@@ -240,11 +349,38 @@ private static TopDocsCollector<?> createCollector(
}
}
+ private static CollectorManager<? extends TopDocsCollector<?>, ? extends TopDocs> createCollectorManager(
+ @Nullable SortAndFormats sortAndFormats,
+ int numHits,
+ @Nullable ScoreDoc searchAfter,
+ int hitCountThreshold
+ ) {
+ if (sortAndFormats == null) {
+ // Please see https://github.com/apache/lucene/pull/450, should be fixed in 9.x
+ if (searchAfter != null) {
+ return TopScoreDocCollector.createSharedManager(
+ numHits,
+ new FieldDoc(searchAfter.doc, searchAfter.score),
+ hitCountThreshold
+ );
+ } else {
+ return TopScoreDocCollector.createSharedManager(numHits, null, hitCountThreshold);
+ }
+ } else {
+ return TopFieldCollector.createSharedManager(sortAndFormats.sort, numHits, (FieldDoc) searchAfter, hitCountThreshold);
+ }
+ }
+
protected final @Nullable SortAndFormats sortAndFormats;
private final Collector collector;
private final Supplier<TotalHits> totalHitsSupplier;
private final Supplier<TopDocs> topDocsSupplier;
private final Supplier<Float> maxScoreSupplier;
+ private final ScoreDoc searchAfter;
+ private final int trackTotalHitsUpTo;
+ private final boolean trackMaxScore;
+ private final boolean hasInfMaxScore;
+ private final int hitCount;
/**
* Ctr
@@ -269,24 +405,30 @@ private SimpleTopDocsCollectorContext(
) throws IOException {
super(REASON_SEARCH_TOP_HITS, numHits);
this.sortAndFormats = sortAndFormats;
+ this.searchAfter = searchAfter;
+ this.trackTotalHitsUpTo = trackTotalHitsUpTo;
+ this.trackMaxScore = trackMaxScore;
+ this.hasInfMaxScore = hasInfMaxScore(query);
final TopDocsCollector<?> topDocsCollector;
- if ((sortAndFormats == null || SortField.FIELD_SCORE.equals(sortAndFormats.sort.getSort()[0])) && hasInfMaxScore(query)) {
+ if ((sortAndFormats == null || SortField.FIELD_SCORE.equals(sortAndFormats.sort.getSort()[0])) && hasInfMaxScore) {
// disable max score optimization since we have a mandatory clause
// that doesn't track the maximum score
topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, Integer.MAX_VALUE);
topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
totalHitsSupplier = () -> topDocsSupplier.get().totalHits;
+ hitCount = Integer.MIN_VALUE;
} else if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
// don't compute hit counts via the collector
topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1);
topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+ hitCount = -1;
} else {
// implicit total hit counts are valid only when there is no filter collector in the chain
- final int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
- if (hitCount == -1) {
+ this.hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query);
+ if (this.hitCount == -1) {
topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, trackTotalHitsUpTo);
topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
totalHitsSupplier = () -> topDocsSupplier.get().totalHits;
@@ -294,7 +436,7 @@ private SimpleTopDocsCollectorContext(
// don't compute hit counts via the collector
topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1);
topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs);
- totalHitsSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO);
+ totalHitsSupplier = () -> new TotalHits(this.hitCount, TotalHits.Relation.EQUAL_TO);
}
}
MaxScoreCollector maxScoreCollector = null;
@@ -315,7 +457,98 @@ private SimpleTopDocsCollectorContext(
}
this.collector = MultiCollector.wrap(topDocsCollector, maxScoreCollector);
+ }
+
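+ /**
+ * Collector manager counterpart of the constructor above: creates one top docs collector (plus
+ * an optional max score collector) per slice through Lucene's shared collector managers, and
+ * merges the per-slice top docs during reduce, applying the same total hit count rules.
+ */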
+ private class SimpleTopDocsCollectorManager
+ implements
+ CollectorManager<Collector, ReduceableSearchResult>,
+ EarlyTerminatingListener {
+ private Integer terminatedAfter;
+ private final CollectorManager<? extends TopDocsCollector<?>, ? extends TopDocs> manager;
+
+ private SimpleTopDocsCollectorManager() {
+ if ((sortAndFormats == null || SortField.FIELD_SCORE.equals(sortAndFormats.sort.getSort()[0])) && hasInfMaxScore) {
+ // disable max score optimization since we have a mandatory clause
+ // that doesn't track the maximum score
+ manager = createCollectorManager(sortAndFormats, numHits, searchAfter, Integer.MAX_VALUE);
+ } else if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
+ // don't compute hit counts via the collector
+ manager = createCollectorManager(sortAndFormats, numHits, searchAfter, 1);
+ } else {
+ // implicit total hit counts are valid only when there is no filter collector in the chain
+ if (hitCount == -1) {
+ manager = createCollectorManager(sortAndFormats, numHits, searchAfter, trackTotalHitsUpTo);
+ } else {
+ // don't compute hit counts via the collector
+ manager = createCollectorManager(sortAndFormats, numHits, searchAfter, 1);
+ }
+ }
+ }
+
+ @Override
+ public void onEarlyTermination(int maxCountHits, boolean forcedTermination) {
+ terminatedAfter = maxCountHits;
+ }
+
+ @Override
+ public Collector newCollector() throws IOException {
+ MaxScoreCollector maxScoreCollector = null;
+
+ if (sortAndFormats != null && trackMaxScore) {
+ maxScoreCollector = new MaxScoreCollector();
+ }
+
+ return MultiCollectorWrapper.wrap(manager.newCollector(), maxScoreCollector);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public ReduceableSearchResult reduce(Collection<Collector> collectors) throws IOException {
+ final Collection<TopDocsCollector<?>> topDocsCollectors = new ArrayList<>();
+ final Collection<MaxScoreCollector> maxScoreCollectors = new ArrayList<>();
+
+ for (final Collector collector : collectors) {
+ if (collector instanceof MultiCollectorWrapper) {
+ for (final Collector sub : (((MultiCollectorWrapper) collector).getCollectors())) {
+ if (sub instanceof TopDocsCollector<?>) {
+ topDocsCollectors.add((TopDocsCollector<?>) sub);
+ } else if (sub instanceof MaxScoreCollector) {
+ maxScoreCollectors.add((MaxScoreCollector) sub);
+ }
+ }
+ } else if (collector instanceof TopDocsCollector<?>) {
+ topDocsCollectors.add((TopDocsCollector<?>) collector);
+ } else if (collector instanceof MaxScoreCollector) {
+ maxScoreCollectors.add((MaxScoreCollector) collector);
+ }
+ }
+
+ float maxScore = Float.NaN;
+ for (final MaxScoreCollector collector : maxScoreCollectors) {
+ float score = collector.getMaxScore();
+ if (Float.isNaN(maxScore)) {
+ maxScore = score;
+ } else {
+ maxScore = Math.max(maxScore, score);
+ }
+ }
+ final TopDocs topDocs = ((CollectorManager<TopDocsCollector<?>, ? extends TopDocs>) manager).reduce(topDocsCollectors);
+ return reduceWith(topDocs, maxScore, terminatedAfter);
+ }
+ }
+
+ @Override
+ CollectorManager<? extends Collector, ReduceableSearchResult> createManager(CollectorManager<? extends Collector, ReduceableSearchResult> in) throws IOException {
+ assert in == null;
+ return new SimpleTopDocsCollectorManager();
+ }
+
+ protected ReduceableSearchResult reduceWith(final TopDocs topDocs, final float maxScore, final Integer terminatedAfter) {
+ return (QuerySearchResult result) -> {
+ final TopDocsAndMaxScore topDocsAndMaxScore = newTopDocs(topDocs, maxScore, terminatedAfter);
+ result.topDocs(topDocsAndMaxScore, sortAndFormats == null ? null : sortAndFormats.formats);
+ };
}
@Override
@@ -324,6 +557,50 @@ Collector create(Collector in) {
return collector;
}
+ TopDocsAndMaxScore newTopDocs(final TopDocs topDocs, final float maxScore, final Integer terminatedAfter) {
+ TotalHits totalHits = null;
+
+ if ((sortAndFormats == null || SortField.FIELD_SCORE.equals(sortAndFormats.sort.getSort()[0])) && hasInfMaxScore) {
+ totalHits = topDocs.totalHits;
+ } else if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
+ // don't compute hit counts via the collector
+ totalHits = new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+ } else {
+ if (hitCount == -1) {
+ totalHits = topDocs.totalHits;
+ } else {
+ totalHits = new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO);
+ }
+ }
+
+ // Since we cannot support early forced termination, we have to simulate it by
+ // artificially reducing the number of total hits and doc scores.
+ ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+ if (terminatedAfter != null) {
+ if (totalHits.value > terminatedAfter) {
+ totalHits = new TotalHits(terminatedAfter, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+ }
+
+ if (scoreDocs != null && scoreDocs.length > terminatedAfter) {
+ scoreDocs = Arrays.copyOf(scoreDocs, terminatedAfter);
+ }
+ }
+
+ final TopDocs newTopDocs;
+ if (topDocs instanceof TopFieldDocs) {
+ TopFieldDocs fieldDocs = (TopFieldDocs) topDocs;
+ newTopDocs = new TopFieldDocs(totalHits, scoreDocs, fieldDocs.fields);
+ } else {
+ newTopDocs = new TopDocs(totalHits, scoreDocs);
+ }
+
+ if (Float.isNaN(maxScore) && newTopDocs.scoreDocs.length > 0 && sortAndFormats == null) {
+ return new TopDocsAndMaxScore(newTopDocs, newTopDocs.scoreDocs[0].score);
+ } else {
+ return new TopDocsAndMaxScore(newTopDocs, maxScore);
+ }
+ }
+
TopDocsAndMaxScore newTopDocs() {
TopDocs in = topDocsSupplier.get();
float maxScore = maxScoreSupplier.get();
@@ -373,6 +650,35 @@ private ScrollingTopDocsCollectorContext(
this.numberOfShards = numberOfShards;
}
+ @Override
+ protected ReduceableSearchResult reduceWith(final TopDocs topDocs, final float maxScore, final Integer terminatedAfter) {
+ return (QuerySearchResult result) -> {
+ final TopDocsAndMaxScore topDocsAndMaxScore = newTopDocs(topDocs, maxScore, terminatedAfter);
+
+ if (scrollContext.totalHits == null) {
+ // first round
+ scrollContext.totalHits = topDocsAndMaxScore.topDocs.totalHits;
+ scrollContext.maxScore = topDocsAndMaxScore.maxScore;
+ } else {
+ // subsequent round: the total number of hits and
+ // the maximum score were computed on the first round
+ topDocsAndMaxScore.topDocs.totalHits = scrollContext.totalHits;
+ topDocsAndMaxScore.maxScore = scrollContext.maxScore;
+ }
+
+ if (numberOfShards == 1) {
+ // if we fetch the document in the same roundtrip, we already know the last emitted doc
+ if (topDocsAndMaxScore.topDocs.scoreDocs.length > 0) {
+ // set the last emitted doc
+ scrollContext.lastEmittedDoc = topDocsAndMaxScore.topDocs.scoreDocs[topDocsAndMaxScore.topDocs.scoreDocs.length
+ - 1];
+ }
+ }
+
+ result.topDocs(topDocsAndMaxScore, sortAndFormats == null ? null : sortAndFormats.formats);
+ };
+ }
+
@Override
void postProcess(QuerySearchResult result) throws IOException {
final TopDocsAndMaxScore topDocs = newTopDocs();
@@ -457,7 +763,7 @@ static int shortcutTotalHitCount(IndexReader reader, Query query) throws IOExcep
* Creates a {@link TopDocsCollectorContext} from the provided searchContext.
* @param hasFilterCollector True if the collector chain contains at least one collector that can filters document.
*/
- static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searchContext, boolean hasFilterCollector)
+ public static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searchContext, boolean hasFilterCollector)
throws IOException {
final IndexReader reader = searchContext.searcher().getIndexReader();
final Query query = searchContext.query();
@@ -515,7 +821,7 @@ static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searc
hasFilterCollector
) {
@Override
- boolean shouldRescore() {
+ public boolean shouldRescore() {
return rescore;
}
};
diff --git a/server/src/main/java/org/opensearch/search/query/TotalHitCountCollectorManager.java b/server/src/main/java/org/opensearch/search/query/TotalHitCountCollectorManager.java
new file mode 100644
index 0000000000000..6d4159c977743
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/query/TotalHitCountCollectorManager.java
@@ -0,0 +1,106 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.search.CollectorManager;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.TotalHits;
+import org.opensearch.common.lucene.Lucene;
+import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
+
+import java.io.IOException;
+import java.util.Collection;
+
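+/**
+ * A {@link CollectorManager} for total hit counts: reduce() sums the counts of the per-slice
+ * {@link TotalHitCountCollector}s and publishes them as empty {@link TopDocs}, clipping the count
+ * when collection terminated early.
+ */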
+public class TotalHitCountCollectorManager
+ implements
+ CollectorManager<TotalHitCountCollector, ReduceableSearchResult>,
+ EarlyTerminatingListener {
+
+ private static final TotalHitCountCollector EMPTY_COLLECTOR = new TotalHitCountCollector() {
+ @Override
+ public void collect(int doc) {}
+
+ @Override
+ public ScoreMode scoreMode() {
+ return ScoreMode.COMPLETE_NO_SCORES;
+ }
+ };
+
+ private final Sort sort;
+ private Integer terminatedAfter;
+
+ public TotalHitCountCollectorManager(final Sort sort) {
+ this.sort = sort;
+ }
+
+ @Override
+ public void onEarlyTermination(int maxCountHits, boolean forcedTermination) {
+ terminatedAfter = maxCountHits;
+ }
+
+ @Override
+ public TotalHitCountCollector newCollector() throws IOException {
+ return new TotalHitCountCollector();
+ }
+
+ @Override
+ public ReduceableSearchResult reduce(Collection<TotalHitCountCollector> collectors) throws IOException {
+ return (QuerySearchResult result) -> {
+ final TotalHits.Relation relation = (terminatedAfter != null)
+ ? TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO
+ : TotalHits.Relation.EQUAL_TO;
+
+ int totalHits = collectors.stream().mapToInt(TotalHitCountCollector::getTotalHits).sum();
+ if (terminatedAfter != null && totalHits > terminatedAfter) {
+ totalHits = terminatedAfter;
+ }
+
+ final TotalHits totalHitCount = new TotalHits(totalHits, relation);
+ final TopDocs topDocs = (sort != null)
+ ? new TopFieldDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS, sort.getSort())
+ : new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS);
+
+ result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), null);
+ };
+ }
+
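+ /**
+ * A no-op manager for precomputed hit counts: its collectors collect nothing and reduce()
+ * simply publishes the total hits it was constructed with.
+ */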
+ static class Empty implements CollectorManager<TotalHitCountCollector, ReduceableSearchResult> {
+ private final TotalHits totalHits;
+ private final Sort sort;
+
+ Empty(final TotalHits totalHits, final Sort sort) {
+ this.totalHits = totalHits;
+ this.sort = sort;
+ }
+
+ @Override
+ public TotalHitCountCollector newCollector() throws IOException {
+ return EMPTY_COLLECTOR;
+ }
+
+ @Override
+ public ReduceableSearchResult reduce(Collection<TotalHitCountCollector> collectors) throws IOException {
+ return (QuerySearchResult result) -> {
+ final TopDocs topDocs;
+
+ if (sort != null) {
+ topDocs = new TopFieldDocs(totalHits, Lucene.EMPTY_SCORE_DOCS, sort.getSort());
+ } else {
+ topDocs = new TopDocs(totalHits, Lucene.EMPTY_SCORE_DOCS);
+ }
+
+ result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), null);
+ };
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/tasks/ResourceStats.java b/server/src/main/java/org/opensearch/tasks/ResourceStats.java
new file mode 100644
index 0000000000000..aab103ad08dcf
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/ResourceStats.java
@@ -0,0 +1,28 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+/**
+ * Different resource stats are defined.
+ */
+public enum ResourceStats {
+ CPU("cpu_time_in_nanos"),
+ MEMORY("memory_in_bytes");
+
+ private final String statsName;
+
+ ResourceStats(String statsName) {
+ this.statsName = statsName;
+ }
+
+ @Override
+ public String toString() {
+ return statsName;
+ }
+}
diff --git a/server/src/main/java/org/opensearch/tasks/ResourceStatsType.java b/server/src/main/java/org/opensearch/tasks/ResourceStatsType.java
new file mode 100644
index 0000000000000..c670ac5ba689c
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/ResourceStatsType.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+/** Defines the different types of resource stats. */
+public enum ResourceStatsType {
+ // resource stats of the worker thread reported directly from runnable.
+ WORKER_STATS("worker_stats", false);
+
+ private final String statsType;
+ private final boolean onlyForAnalysis;
+
+ ResourceStatsType(String statsType, boolean onlyForAnalysis) {
+ this.statsType = statsType;
+ this.onlyForAnalysis = onlyForAnalysis;
+ }
+
+ public boolean isOnlyForAnalysis() {
+ return onlyForAnalysis;
+ }
+
+ @Override
+ public String toString() {
+ return statsType;
+ }
+}
diff --git a/server/src/main/java/org/opensearch/tasks/ResourceUsageInfo.java b/server/src/main/java/org/opensearch/tasks/ResourceUsageInfo.java
new file mode 100644
index 0000000000000..ae58f712b63c2
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/ResourceUsageInfo.java
@@ -0,0 +1,108 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Collections;
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Thread resource usage information for a particular resource stats type.
+ *
+ * It captures resource usage information, like memory and CPU, about a particular execution of a thread
+ * for a specific stats type.
+ */
+public class ResourceUsageInfo {
+ private static final Logger logger = LogManager.getLogger(ResourceUsageInfo.class);
+ private final EnumMap<ResourceStats, ResourceStatsInfo> statsInfo = new EnumMap<>(ResourceStats.class);
+
+ public ResourceUsageInfo(ResourceUsageMetric... resourceUsageMetrics) {
+ for (ResourceUsageMetric resourceUsageMetric : resourceUsageMetrics) {
+ this.statsInfo.put(resourceUsageMetric.getStats(), new ResourceStatsInfo(resourceUsageMetric.getValue()));
+ }
+ }
+
+ public void recordResourceUsageMetrics(ResourceUsageMetric... resourceUsageMetrics) {
+ for (ResourceUsageMetric resourceUsageMetric : resourceUsageMetrics) {
+ final ResourceStatsInfo resourceStatsInfo = statsInfo.get(resourceUsageMetric.getStats());
+ if (resourceStatsInfo != null) {
+ updateResourceUsageInfo(resourceStatsInfo, resourceUsageMetric);
+ } else {
+ throw new IllegalStateException(
+ "cannot update ["
+ + resourceUsageMetric.getStats().toString()
+ + "] entry as its not present current_stats_info:"
+ + statsInfo
+ );
+ }
+ }
+ }
+
+ private void updateResourceUsageInfo(ResourceStatsInfo resourceStatsInfo, ResourceUsageMetric resourceUsageMetric) {
+ long currentEndValue;
+ long newEndValue;
+ do {
+ currentEndValue = resourceStatsInfo.endValue.get();
+ newEndValue = resourceUsageMetric.getValue();
+ if (currentEndValue > newEndValue) {
+ logger.debug(
+ "dropping resource usage update as the new value is lower than current value ["
+ + "resource_stats=[{}], "
+ + "current_end_value={}, "
+ + "new_end_value={}]",
+ resourceUsageMetric.getStats(),
+ currentEndValue,
+ newEndValue
+ );
+ return;
+ }
+ } while (!resourceStatsInfo.endValue.compareAndSet(currentEndValue, newEndValue));
+ logger.debug(
+ "updated resource usage info [resource_stats=[{}], " + "old_end_value={}, new_end_value={}]",
+ resourceUsageMetric.getStats(),
+ currentEndValue,
+ newEndValue
+ );
+ }
+
+ public Map<ResourceStats, ResourceStatsInfo> getStatsInfo() {
+ return Collections.unmodifiableMap(statsInfo);
+ }
+
+ @Override
+ public String toString() {
+ return statsInfo.toString();
+ }
+
+ /**
+ * Defines resource stats information.
+ */
+ static class ResourceStatsInfo {
+ private final long startValue;
+ private final AtomicLong endValue;
+
+ private ResourceStatsInfo(long startValue) {
+ this.startValue = startValue;
+ this.endValue = new AtomicLong(startValue);
+ }
+
+ public long getTotalValue() {
+ return endValue.get() - startValue;
+ }
+
+ @Override
+ public String toString() {
+ return String.valueOf(getTotalValue());
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/tasks/ResourceUsageMetric.java b/server/src/main/java/org/opensearch/tasks/ResourceUsageMetric.java
new file mode 100644
index 0000000000000..0d13ffe6ec01a
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/ResourceUsageMetric.java
@@ -0,0 +1,27 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
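+/**
+ * A single measurement of a {@link ResourceStats} type, for example the CPU time or memory
+ * consumed by a thread at a given point in time.
+ */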
+public class ResourceUsageMetric {
+ private final ResourceStats stats;
+ private final long value;
+
+ public ResourceUsageMetric(ResourceStats stats, long value) {
+ this.stats = stats;
+ this.value = value;
+ }
+
+ public ResourceStats getStats() {
+ return stats;
+ }
+
+ public long getValue() {
+ return value;
+ }
+}
diff --git a/server/src/main/java/org/opensearch/tasks/Task.java b/server/src/main/java/org/opensearch/tasks/Task.java
index ad9d5c3f04411..62453d08724ce 100644
--- a/server/src/main/java/org/opensearch/tasks/Task.java
+++ b/server/src/main/java/org/opensearch/tasks/Task.java
@@ -32,6 +32,8 @@
package org.opensearch.tasks;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.opensearch.action.ActionResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.io.stream.NamedWriteable;
@@ -39,18 +41,27 @@
import org.opensearch.common.xcontent.ToXContentObject;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
/**
* Current task information
*/
public class Task {
+ private static final Logger logger = LogManager.getLogger(Task.class);
+
/**
* The request header to mark tasks with specific ids
*/
public static final String X_OPAQUE_ID = "X-Opaque-Id";
+ private static final String TOTAL = "total";
+
private final long id;
private final String type;
@@ -63,6 +74,8 @@ public class Task {
private final Map<String, String> headers;
+ private final Map<Long, List<ThreadResourceInfo>> resourceStats;
+
/**
* The task's start time as a wall clock time since epoch ({@link System#currentTimeMillis()} style).
*/
@@ -74,7 +87,7 @@ public class Task {
private final long startTimeNanos;
public Task(long id, String type, String action, String description, TaskId parentTask, Map<String, String> headers) {
- this(id, type, action, description, parentTask, System.currentTimeMillis(), System.nanoTime(), headers);
+ this(id, type, action, description, parentTask, System.currentTimeMillis(), System.nanoTime(), headers, new ConcurrentHashMap<>());
}
public Task(
@@ -85,7 +98,8 @@ public Task(
TaskId parentTask,
long startTime,
long startTimeNanos,
- Map<String, String> headers
+ Map<String, String> headers,
+ ConcurrentHashMap<Long, List<ThreadResourceInfo>> resourceStats
) {
this.id = id;
this.type = type;
@@ -95,6 +109,7 @@ public Task(
this.startTime = startTime;
this.startTimeNanos = startTimeNanos;
this.headers = headers;
+ this.resourceStats = resourceStats;
}
/**
@@ -108,19 +123,48 @@ public Task(
* generate data?
*/
public final TaskInfo taskInfo(String localNodeId, boolean detailed) {
+ return taskInfo(localNodeId, detailed, detailed == false);
+ }
+
+ /**
+ * Build a version of the task status you can throw over the wire and back
+ * with the option to include resource stats or not.
+ * This method is only used when creating a TaskResult, to avoid storing resource information in the task index.
+ *
+ * @param excludeStats whether to exclude resource stats from the result.
+ * By default, the detailed flag controls whether resource information is included,
+ * but resource stats must be excluded when storing into the task index, whose strict mapping breaks when this field is added.
+ * In the future, task-index-mapping.json can be modified to add resource stats.
+ */
+ private TaskInfo taskInfo(String localNodeId, boolean detailed, boolean excludeStats) {
String description = null;
Task.Status status = null;
+ TaskResourceStats resourceStats = null;
if (detailed) {
description = getDescription();
status = getStatus();
}
- return taskInfo(localNodeId, description, status);
+ if (excludeStats == false) {
+ resourceStats = new TaskResourceStats(new HashMap<>() {
+ {
+ put(TOTAL, getTotalResourceStats());
+ }
+ });
+ }
+ return taskInfo(localNodeId, description, status, resourceStats);
}
/**
- * Build a proper {@link TaskInfo} for this task.
+ * Build a {@link TaskInfo} for this task without resource stats.
*/
protected final TaskInfo taskInfo(String localNodeId, String description, Status status) {
+ return taskInfo(localNodeId, description, status, null);
+ }
+
+ /**
+ * Build a proper {@link TaskInfo} for this task.
+ */
+ protected final TaskInfo taskInfo(String localNodeId, String description, Status status, TaskResourceStats resourceStats) {
return new TaskInfo(
new TaskId(localNodeId, getId()),
getType(),
@@ -132,7 +176,8 @@ protected final TaskInfo taskInfo(String localNodeId, String description, Status
this instanceof CancellableTask,
this instanceof CancellableTask && ((CancellableTask) this).isCancelled(),
parentTask,
- headers
+ headers,
+ resourceStats
);
}
@@ -195,6 +240,102 @@ public Status getStatus() {
return null;
}
+ /**
+ * Returns thread level resource consumption of the task
+ */
+ public Map<Long, List<ThreadResourceInfo>> getResourceStats() {
+ return Collections.unmodifiableMap(resourceStats);
+ }
+
+ /**
+ * Returns current total resource usage of the task.
+ * Currently, this method is only called on demand, during get and listing of tasks.
+ * In the future, these values can be cached as an optimization.
+ */
+ public TaskResourceUsage getTotalResourceStats() {
+ return new TaskResourceUsage(getTotalResourceUtilization(ResourceStats.CPU), getTotalResourceUtilization(ResourceStats.MEMORY));
+ }
+
+ /**
+ * Returns total resource consumption for a specific task stat.
+ */
+ public long getTotalResourceUtilization(ResourceStats stats) {
+ long totalResourceConsumption = 0L;
+ for (List<ThreadResourceInfo> threadResourceInfosList : resourceStats.values()) {
+ for (ThreadResourceInfo threadResourceInfo : threadResourceInfosList) {
+ final ResourceUsageInfo.ResourceStatsInfo statsInfo = threadResourceInfo.getResourceUsageInfo().getStatsInfo().get(stats);
+ if (threadResourceInfo.getStatsType().isOnlyForAnalysis() == false && statsInfo != null) {
+ totalResourceConsumption += statsInfo.getTotalValue();
+ }
+ }
+ }
+ return totalResourceConsumption;
+ }
+
+ /**
+ * Adds a thread's starting resource consumption information.
+ * @param threadId ID of the thread
+ * @param statsType stats type
+ * @param resourceUsageMetrics resource consumption metrics of the thread
+ * @throws IllegalStateException if a matching active thread entry was already present, which is not expected.
+ */
+ public void startThreadResourceTracking(long threadId, ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) {
+ final List<ThreadResourceInfo> threadResourceInfoList = resourceStats.computeIfAbsent(threadId, k -> new ArrayList<>());
+ // active thread entry should not be present in the list
+ for (ThreadResourceInfo threadResourceInfo : threadResourceInfoList) {
+ if (threadResourceInfo.getStatsType() == statsType && threadResourceInfo.isActive()) {
+ throw new IllegalStateException(
+ "unexpected active thread resource entry present [" + threadId + "]:[" + threadResourceInfo + "]"
+ );
+ }
+ }
+ threadResourceInfoList.add(new ThreadResourceInfo(statsType, resourceUsageMetrics));
+ }
+
+ /**
+ * This method is used to update the resource consumption stats so that the data isn't too stale for long-running tasks.
+ * If active thread entry is present in the list, the entry is updated. If one is not found, it throws an exception.
+ * @param threadId ID of the thread
+ * @param statsType stats type
+ * @param resourceUsageMetrics resource consumption metrics of the thread
+ * @throws IllegalStateException if no matching active thread entry was found.
+ */
+ public void updateThreadResourceStats(long threadId, ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) {
+ final List<ThreadResourceInfo> threadResourceInfoList = resourceStats.get(threadId);
+ if (threadResourceInfoList != null) {
+ for (ThreadResourceInfo threadResourceInfo : threadResourceInfoList) {
+ // the active entry present in the list is updated
+ if (threadResourceInfo.getStatsType() == statsType && threadResourceInfo.isActive()) {
+ threadResourceInfo.recordResourceUsageMetrics(resourceUsageMetrics);
+ return;
+ }
+ }
+ }
+ throw new IllegalStateException("cannot update if active thread resource entry is not present");
+ }
+
+ /**
+ * Record the thread's final resource consumption values.
+ * If active thread entry is present in the list, the entry is updated. If one is not found, it throws an exception.
+ * @param threadId ID of the thread
+ * @param statsType stats type
+ * @param resourceUsageMetrics resource consumption metrics of the thread
+ * @throws IllegalStateException if no matching active thread entry was found.
+ */
+ public void stopThreadResourceTracking(long threadId, ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) {
+ final List<ThreadResourceInfo> threadResourceInfoList = resourceStats.get(threadId);
+ if (threadResourceInfoList != null) {
+ for (ThreadResourceInfo threadResourceInfo : threadResourceInfoList) {
+ if (threadResourceInfo.getStatsType() == statsType && threadResourceInfo.isActive()) {
+ threadResourceInfo.setActive(false);
+ threadResourceInfo.recordResourceUsageMetrics(resourceUsageMetrics);
+ return;
+ }
+ }
+ }
+ throw new IllegalStateException("cannot update final values if active thread resource entry is not present");
+ }
+
/**
* Report of the internal status of a task. These can vary wildly from task
* to task because each task is implemented differently but we should try
@@ -217,12 +358,12 @@ public String getHeader(String header) {
}
public TaskResult result(DiscoveryNode node, Exception error) throws IOException {
- return new TaskResult(taskInfo(node.getId(), true), error);
+ return new TaskResult(taskInfo(node.getId(), true, true), error);
}
public TaskResult result(DiscoveryNode node, ActionResponse response) throws IOException {
if (response instanceof ToXContent) {
- return new TaskResult(taskInfo(node.getId(), true), (ToXContent) response);
+ return new TaskResult(taskInfo(node.getId(), true, true), (ToXContent) response);
} else {
throw new IllegalStateException("response has to implement ToXContent to be able to store the results");
}
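
Taken together, the new Task methods above define a per-thread tracking lifecycle: record a baseline when a thread picks up the task, refresh the active entry periodically so long-running work stays current, and write final values when the thread finishes. A hedged sketch of that call sequence follows; the `WORKER_STATS` constant on `ResourceStatsType` is an assumed name used for illustration:

```java
import java.util.Collections;

import org.opensearch.tasks.ResourceStats;
import org.opensearch.tasks.ResourceStatsType;
import org.opensearch.tasks.ResourceUsageMetric;
import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskId;
import org.opensearch.tasks.TaskResourceUsage;

public class TaskResourceTrackingExample {
    public static void main(String[] args) {
        Task task = new Task(1, "transport", "indices:data/read/search", "example task", TaskId.EMPTY_TASK_ID, Collections.emptyMap());
        long threadId = Thread.currentThread().getId();

        // Baseline when the thread starts working on the task; creates the active entry.
        // ResourceStatsType.WORKER_STATS is an assumed constant name for illustration.
        task.startThreadResourceTracking(
            threadId,
            ResourceStatsType.WORKER_STATS,
            new ResourceUsageMetric(ResourceStats.CPU, 1_000L),
            new ResourceUsageMetric(ResourceStats.MEMORY, 2_048L)
        );

        // Periodic refresh so the data isn't too stale for long-running tasks.
        task.updateThreadResourceStats(
            threadId,
            ResourceStatsType.WORKER_STATS,
            new ResourceUsageMetric(ResourceStats.CPU, 5_000L),
            new ResourceUsageMetric(ResourceStats.MEMORY, 4_096L)
        );

        // Final values; marks the entry inactive so a new one may be started later.
        task.stopThreadResourceTracking(
            threadId,
            ResourceStatsType.WORKER_STATS,
            new ResourceUsageMetric(ResourceStats.CPU, 9_000L),
            new ResourceUsageMetric(ResourceStats.MEMORY, 4_096L)
        );

        // Totals are end minus start per entry: cpu 8000 ns, memory 2048 bytes here.
        TaskResourceUsage total = task.getTotalResourceStats();
        System.out.println(total.getCpuTimeInNanos() + " / " + total.getMemoryInBytes());
    }
}
```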
diff --git a/server/src/main/java/org/opensearch/tasks/TaskInfo.java b/server/src/main/java/org/opensearch/tasks/TaskInfo.java
index cf77eaf540ee6..b6814282b5db4 100644
--- a/server/src/main/java/org/opensearch/tasks/TaskInfo.java
+++ b/server/src/main/java/org/opensearch/tasks/TaskInfo.java
@@ -86,6 +86,8 @@ public final class TaskInfo implements Writeable, ToXContentFragment {
private final Map<String, String> headers;
+ private final TaskResourceStats resourceStats;
+
public TaskInfo(
TaskId taskId,
String type,
@@ -97,7 +99,8 @@ public TaskInfo(
boolean cancellable,
boolean cancelled,
TaskId parentTaskId,
- Map<String, String> headers
+ Map<String, String> headers,
+ TaskResourceStats resourceStats
) {
if (cancellable == false && cancelled == true) {
throw new IllegalArgumentException("task cannot be cancelled");
@@ -113,11 +116,13 @@ public TaskInfo(
this.cancelled = cancelled;
this.parentTaskId = parentTaskId;
this.headers = headers;
+ this.resourceStats = resourceStats;
}
/**
* Read from a stream.
*/
+ @SuppressWarnings("unchecked")
public TaskInfo(StreamInput in) throws IOException {
taskId = TaskId.readFromStream(in);
type = in.readString();
@@ -137,6 +142,11 @@ public TaskInfo(StreamInput in) throws IOException {
}
parentTaskId = TaskId.readFromStream(in);
headers = in.readMap(StreamInput::readString, StreamInput::readString);
+ if (in.getVersion().onOrAfter(Version.V_2_1_0)) {
+ resourceStats = in.readOptionalWriteable(TaskResourceStats::new);
+ } else {
+ resourceStats = null;
+ }
}
@Override
@@ -154,6 +164,9 @@ public void writeTo(StreamOutput out) throws IOException {
}
parentTaskId.writeTo(out);
out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString);
+ if (out.getVersion().onOrAfter(Version.V_2_1_0)) {
+ out.writeOptionalWriteable(resourceStats);
+ }
}
public TaskId getTaskId() {
@@ -226,6 +239,13 @@ public Map<String, String> getHeaders() {
return headers;
}
+ /**
+ * Returns the task resource information
+ */
+ public TaskResourceStats getResourceStats() {
+ return resourceStats;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("node", taskId.getNodeId());
@@ -253,6 +273,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(attribute.getKey(), attribute.getValue());
}
builder.endObject();
+ if (resourceStats != null) {
+ builder.startObject("resource_stats");
+ resourceStats.toXContent(builder, params);
+ builder.endObject();
+ }
return builder;
}
@@ -278,6 +303,8 @@ public static TaskInfo fromXContent(XContentParser parser) {
// This might happen if we are reading an old version of task info
headers = Collections.emptyMap();
}
+ @SuppressWarnings("unchecked")
+ TaskResourceStats resourceStats = (TaskResourceStats) a[i++];
RawTaskStatus status = statusBytes == null ? null : new RawTaskStatus(statusBytes);
TaskId parentTaskId = parentTaskIdString == null ? TaskId.EMPTY_TASK_ID : new TaskId(parentTaskIdString);
return new TaskInfo(
@@ -291,7 +318,8 @@ public static TaskInfo fromXContent(XContentParser parser) {
cancellable,
cancelled,
parentTaskId,
- headers
+ headers,
+ resourceStats
);
});
static {
@@ -309,6 +337,7 @@ public static TaskInfo fromXContent(XContentParser parser) {
PARSER.declareBoolean(optionalConstructorArg(), new ParseField("cancelled"));
PARSER.declareString(optionalConstructorArg(), new ParseField("parent_task_id"));
PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapStrings(), new ParseField("headers"));
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> TaskResourceStats.fromXContent(p), new ParseField("resource_stats"));
}
@Override
@@ -333,7 +362,8 @@ public boolean equals(Object obj) {
&& Objects.equals(cancellable, other.cancellable)
&& Objects.equals(cancelled, other.cancelled)
&& Objects.equals(status, other.status)
- && Objects.equals(headers, other.headers);
+ && Objects.equals(headers, other.headers)
+ && Objects.equals(resourceStats, other.resourceStats);
}
@Override
@@ -349,7 +379,8 @@ public int hashCode() {
cancellable,
cancelled,
status,
- headers
+ headers,
+ resourceStats
);
}
}
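
The stream constructor and `writeTo` changes above follow the usual wire backward-compatibility pattern: a newly added optional field is read and written only when the peer's version is at least the version that introduced it, so older nodes never see the extra bytes. A minimal sketch of the pattern under that assumption, with a hypothetical `ExampleRequest`:

```java
import java.io.IOException;

import org.opensearch.Version;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.tasks.TaskResourceStats;

// Hypothetical writeable showing the version-gated optional-field pattern used
// by TaskInfo above: read and write are gated on the same version constant so
// that mixed-version clusters agree on the wire layout.
public class ExampleRequest implements Writeable {
    private final String name;             // present on all versions
    private final TaskResourceStats stats; // added in 2.1.0, may be null

    public ExampleRequest(String name, TaskResourceStats stats) {
        this.name = name;
        this.stats = stats;
    }

    public ExampleRequest(StreamInput in) throws IOException {
        name = in.readString();
        if (in.getVersion().onOrAfter(Version.V_2_1_0)) {
            stats = in.readOptionalWriteable(TaskResourceStats::new);
        } else {
            stats = null; // an older sender never wrote the field
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        if (out.getVersion().onOrAfter(Version.V_2_1_0)) {
            out.writeOptionalWriteable(stats); // skipped entirely for older receivers
        }
    }
}
```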
diff --git a/server/src/main/java/org/opensearch/tasks/TaskResourceStats.java b/server/src/main/java/org/opensearch/tasks/TaskResourceStats.java
new file mode 100644
index 0000000000000..c35e08ebb34ec
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/TaskResourceStats.java
@@ -0,0 +1,106 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+import org.opensearch.common.Strings;
+import org.opensearch.common.io.stream.StreamInput;
+import org.opensearch.common.io.stream.StreamOutput;
+import org.opensearch.common.io.stream.Writeable;
+import org.opensearch.common.xcontent.ToXContentFragment;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Resource information about a currently running task.
+ *
+ * Writeable TaskResourceStats objects are used to represent resource
+ * snapshot information about a currently running task.
+ */
+public class TaskResourceStats implements Writeable, ToXContentFragment {
+ private final Map<String, TaskResourceUsage> resourceUsage;
+
+ public TaskResourceStats(Map<String, TaskResourceUsage> resourceUsage) {
+ this.resourceUsage = Objects.requireNonNull(resourceUsage, "resource usage is required");
+ }
+
+ /**
+ * Read from a stream.
+ */
+ public TaskResourceStats(StreamInput in) throws IOException {
+ resourceUsage = in.readMap(StreamInput::readString, TaskResourceUsage::readFromStream);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeMap(resourceUsage, StreamOutput::writeString, (stream, stats) -> stats.writeTo(stream));
+ }
+
+ public Map<String, TaskResourceUsage> getResourceUsageInfo() {
+ return resourceUsage;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ for (Map.Entry<String, TaskResourceUsage> resourceUsageEntry : resourceUsage.entrySet()) {
+ builder.startObject(resourceUsageEntry.getKey());
+ if (resourceUsageEntry.getValue() != null) {
+ resourceUsageEntry.getValue().toXContent(builder, params);
+ }
+ builder.endObject();
+ }
+ return builder;
+ }
+
+ public static TaskResourceStats fromXContent(XContentParser parser) throws IOException {
+ XContentParser.Token token = parser.currentToken();
+ if (token == null) {
+ token = parser.nextToken();
+ }
+ if (token == XContentParser.Token.START_OBJECT) {
+ token = parser.nextToken();
+ }
+ final Map<String, TaskResourceUsage> resourceStats = new HashMap<>();
+ if (token == XContentParser.Token.FIELD_NAME) {
+ assert parser.currentToken() == XContentParser.Token.FIELD_NAME : "Expected field name but saw [" + parser.currentToken() + "]";
+ do {
+ // Must point to field name
+ String fieldName = parser.currentName();
+ // And then the value
+ TaskResourceUsage value = TaskResourceUsage.fromXContent(parser);
+ resourceStats.put(fieldName, value);
+ } while (parser.nextToken() == XContentParser.Token.FIELD_NAME);
+ }
+ return new TaskResourceStats(resourceStats);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(this, true, true);
+ }
+
+ // Implements equals and hashcode for testing
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass() != TaskResourceStats.class) {
+ return false;
+ }
+ TaskResourceStats other = (TaskResourceStats) obj;
+ return Objects.equals(resourceUsage, other.resourceUsage);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(resourceUsage);
+ }
+}
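
Because `TaskResourceStats` is a `ToXContentFragment`, the caller opens the enclosing object; each entry then renders as an object keyed by its bucket name ("total" in this change). A hedged sketch of rendering it directly with the stock XContent helpers:

```java
import java.io.IOException;
import java.util.Map;

import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.tasks.TaskResourceStats;
import org.opensearch.tasks.TaskResourceUsage;

public class TaskResourceStatsXContentExample {
    public static void main(String[] args) throws IOException {
        TaskResourceStats stats = new TaskResourceStats(Map.of("total", new TaskResourceUsage(8_000L, 2_048L)));

        // The fragment does not open its own object, so the caller does.
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        stats.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();

        // Expected shape: {"total":{"cpu_time_in_nanos":8000,"memory_in_bytes":2048}}
        System.out.println(Strings.toString(builder));
    }
}
```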
diff --git a/server/src/main/java/org/opensearch/tasks/TaskResourceUsage.java b/server/src/main/java/org/opensearch/tasks/TaskResourceUsage.java
new file mode 100644
index 0000000000000..6af3de2b78c06
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/TaskResourceUsage.java
@@ -0,0 +1,105 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+import org.opensearch.common.ParseField;
+import org.opensearch.common.Strings;
+import org.opensearch.common.io.stream.StreamInput;
+import org.opensearch.common.io.stream.StreamOutput;
+import org.opensearch.common.io.stream.Writeable;
+import org.opensearch.common.xcontent.ConstructingObjectParser;
+import org.opensearch.common.xcontent.ToXContentFragment;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Task resource usage information
+ *
+ * Writeable TaskResourceUsage objects are used to represent resource usage
+ * information of running tasks.
+ */
+public class TaskResourceUsage implements Writeable, ToXContentFragment {
+ private static final ParseField CPU_TIME_IN_NANOS = new ParseField("cpu_time_in_nanos");
+ private static final ParseField MEMORY_IN_BYTES = new ParseField("memory_in_bytes");
+
+ private final long cpuTimeInNanos;
+ private final long memoryInBytes;
+
+ public TaskResourceUsage(long cpuTimeInNanos, long memoryInBytes) {
+ this.cpuTimeInNanos = cpuTimeInNanos;
+ this.memoryInBytes = memoryInBytes;
+ }
+
+ /**
+ * Read from a stream.
+ */
+ public static TaskResourceUsage readFromStream(StreamInput in) throws IOException {
+ return new TaskResourceUsage(in.readVLong(), in.readVLong());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVLong(cpuTimeInNanos);
+ out.writeVLong(memoryInBytes);
+ }
+
+ public long getCpuTimeInNanos() {
+ return cpuTimeInNanos;
+ }
+
+ public long getMemoryInBytes() {
+ return memoryInBytes;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.field(CPU_TIME_IN_NANOS.getPreferredName(), cpuTimeInNanos);
+ builder.field(MEMORY_IN_BYTES.getPreferredName(), memoryInBytes);
+ return builder;
+ }
+
+ public static final ConstructingObjectParser<TaskResourceUsage, Void> PARSER = new ConstructingObjectParser<>(
+ "task_resource_usage",
+ a -> new TaskResourceUsage((Long) a[0], (Long) a[1])
+ );
+
+ static {
+ PARSER.declareLong(constructorArg(), CPU_TIME_IN_NANOS);
+ PARSER.declareLong(constructorArg(), MEMORY_IN_BYTES);
+ }
+
+ public static TaskResourceUsage fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(this, true, true);
+ }
+
+ // Implements equals and hashcode for testing
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass() != TaskResourceUsage.class) {
+ return false;
+ }
+ TaskResourceUsage other = (TaskResourceUsage) obj;
+ return Objects.equals(cpuTimeInNanos, other.cpuTimeInNanos) && Objects.equals(memoryInBytes, other.memoryInBytes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(cpuTimeInNanos, memoryInBytes);
+ }
+}
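
The `ConstructingObjectParser` above binds the two declared long fields positionally to the constructor, so `fromXContent` can rebuild an instance from the JSON emitted by `toXContent`. A small round-trip sketch, with parser setup using the permissive defaults commonly seen in tests:

```java
import java.io.IOException;

import org.opensearch.common.xcontent.DeprecationHandler;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.tasks.TaskResourceUsage;

public class TaskResourceUsageParseExample {
    public static void main(String[] args) throws IOException {
        String json = "{\"cpu_time_in_nanos\":8000,\"memory_in_bytes\":2048}";
        try (
            XContentParser parser = XContentType.JSON.xContent()
                .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)
        ) {
            // PARSER maps the two declared fields positionally into the constructor.
            TaskResourceUsage usage = TaskResourceUsage.fromXContent(parser);
            System.out.println(usage.getCpuTimeInNanos()); // 8000
            System.out.println(usage.getMemoryInBytes());  // 2048
        }
    }
}
```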
diff --git a/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java b/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java
new file mode 100644
index 0000000000000..8b45c38c8fb63
--- /dev/null
+++ b/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java
@@ -0,0 +1,54 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.tasks;
+
+/**
+ * Resource consumption information about a particular execution of a thread.
+ *
+ * It captures the resource usage information for a particular execution of a thread,
+ * for a specific stats type such as worker_stats or response_stats.
+ */
+public class ThreadResourceInfo {
+ private volatile boolean isActive = true;
+ private final ResourceStatsType statsType;
+ private final ResourceUsageInfo resourceUsageInfo;
+
+ public ThreadResourceInfo(ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) {
+ this.statsType = statsType;
+ this.resourceUsageInfo = new ResourceUsageInfo(resourceUsageMetrics);
+ }
+
+ /**
+ * Updates thread's resource consumption information.
+ */
+ public void recordResourceUsageMetrics(ResourceUsageMetric... resourceUsageMetrics) {
+ resourceUsageInfo.recordResourceUsageMetrics(resourceUsageMetrics);
+ }
+
+ public void setActive(boolean isActive) {
+ this.isActive = isActive;
+ }
+
+ public boolean isActive() {
+ return isActive;
+ }
+
+ public ResourceStatsType getStatsType() {
+ return statsType;
+ }
+
+ public ResourceUsageInfo getResourceUsageInfo() {
+ return resourceUsageInfo;
+ }
+
+ @Override
+ public String toString() {
+ return resourceUsageInfo + ", stats_type=" + statsType + ", is_active=" + isActive;
+ }
+}
diff --git a/server/src/main/java/org/opensearch/transport/InboundDecoder.java b/server/src/main/java/org/opensearch/transport/InboundDecoder.java
index bd1d384fd37da..342a076774896 100644
--- a/server/src/main/java/org/opensearch/transport/InboundDecoder.java
+++ b/server/src/main/java/org/opensearch/transport/InboundDecoder.java
@@ -55,6 +55,8 @@ public class InboundDecoder implements Releasable {
private int bytesConsumed = 0;
private boolean isClosed = false;
+ private static final Version V_4_0_0 = Version.fromId(4000099 ^ Version.MASK);
+
public InboundDecoder(Version version, PageCacheRecycler recycler) {
this.version = version;
this.recycler = recycler;
@@ -217,8 +219,8 @@ static IllegalStateException ensureVersionCompatibility(Version remoteVersion, V
// handshake. This looks odd but it's required to establish the connection correctly; we check for real compatibility
// once the connection is established
final Version compatibilityVersion = isHandshake ? currentVersion.minimumCompatibilityVersion() : currentVersion;
- if ((currentVersion.equals(Version.V_2_0_0) && remoteVersion.equals(Version.fromId(6079999))) == false
- && remoteVersion.isCompatible(compatibilityVersion) == false) {
+ boolean v3x = currentVersion.onOrAfter(Version.V_3_0_0) && currentVersion.before(V_4_0_0);
+ if ((v3x && remoteVersion.equals(Version.fromId(7099999)) == false) && remoteVersion.isCompatible(compatibilityVersion) == false) {
final Version minCompatibilityVersion = isHandshake ? compatibilityVersion : compatibilityVersion.minimumCompatibilityVersion();
String msg = "Received " + (isHandshake ? "handshake " : "") + "message from unsupported version: [";
return new IllegalStateException(msg + remoteVersion + "] minimal compatible version is: [" + minCompatibilityVersion + "]");
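
The sentinel ids in this hunk (7099999, and 4000099 XORed with `Version.MASK`) lean on the numeric version-id scheme: decimal fields encode major, minor, revision, and build, and OpenSearch ids are kept disjoint from legacy Elasticsearch ids via the `Version.MASK` XOR. An illustrative decoding, mirroring the field layout of `Version.fromId`; treat the exact layout as an assumption:

```java
// Illustrative decoding of the numeric version ids used in this hunk. The
// decimal field layout mirrors Version.fromId (an assumption stated above);
// OpenSearch ids are additionally XORed with Version.MASK so they stay
// disjoint from legacy Elasticsearch ids.
public class VersionIdExample {
    public static void main(String[] args) {
        int id = 7099999; // legacy BC sentinel seen during handshakes
        int major = (id / 1000000) % 100; // 7
        int minor = (id / 10000) % 100;   // 9
        int revision = (id / 100) % 100;  // 99
        int build = id % 100;             // 99
        System.out.printf("%d.%d.%d (build %d)%n", major, minor, revision, build);
    }
}
```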
diff --git a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
index c85a8eebd8fbd..1b6a2580fcf77 100644
--- a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
+++ b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
@@ -63,9 +63,6 @@ final class TransportHandshaker {
private final ThreadPool threadPool;
private final HandshakeRequestSender handshakeRequestSender;
- // @todo remove in 3.0.0
- static final Version V_3_0_0 = Version.fromId(3000099 ^ Version.MASK);
-
TransportHandshaker(Version version, ThreadPool threadPool, HandshakeRequestSender handshakeRequestSender) {
this.version = version;
this.threadPool = threadPool;
@@ -95,7 +92,7 @@ void sendHandshake(long requestId, DiscoveryNode node, TcpChannel channel, TimeV
// Sending only the BC version to an Elasticsearch node provides an easy deprecation path for this BC version logic
// in OpenSearch 2.0.0.
minCompatVersion = Version.fromId(6079999);
- } else if (version.onOrAfter(Version.V_2_0_0)) {
+ } else if (version.before(Version.V_3_0_0)) {
minCompatVersion = Version.fromId(7099999);
}
handshakeRequestSender.sendRequest(node, channel, requestId, minCompatVersion);
@@ -134,7 +131,7 @@ void handleHandshake(TransportChannel channel, long requestId, StreamInput strea
// 1. if remote node is 7.x, then StreamInput version would be 6.8.0
// 2. if remote node is 6.8 then it would be 5.6.0
// 3. if remote node is OpenSearch 1.x then it would be 6.7.99
- if ((this.version.onOrAfter(Version.V_1_0_0) && this.version.before(V_3_0_0))
+ if ((this.version.onOrAfter(Version.V_1_0_0) && this.version.before(Version.V_3_0_0))
&& (stream.getVersion().equals(LegacyESVersion.fromId(6080099)) || stream.getVersion().equals(Version.fromId(5060099)))) {
// send 7.10.2 in response to ensure compatibility w/ Legacy 7.10.x nodes for rolling upgrade support
channel.sendResponse(new HandshakeResponse(LegacyESVersion.V_7_10_2));
diff --git a/server/src/test/java/org/opensearch/LegacyESVersionTests.java b/server/src/test/java/org/opensearch/LegacyESVersionTests.java
deleted file mode 100644
index 8fb3636dd8b2c..0000000000000
--- a/server/src/test/java/org/opensearch/LegacyESVersionTests.java
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch;
-
-import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.VersionUtils;
-
-import java.lang.reflect.Modifier;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.lessThan;
-import static org.hamcrest.Matchers.sameInstance;
-import static org.opensearch.LegacyESVersion.V_7_0_0;
-import static org.opensearch.test.VersionUtils.randomLegacyVersion;
-import static org.opensearch.VersionTests.isCompatible;
-
-/**
- * tests LegacyESVersion utilities.
- * note: legacy version compatibility is already tested by its predecessor
- */
-public class LegacyESVersionTests extends OpenSearchTestCase {
-
- public void testVersionComparison() {
- Version V_6_8_15 = LegacyESVersion.fromString("6.8.15");
- assertThat(V_6_8_15.before(V_7_0_0), is(true));
- assertThat(V_6_8_15.before(V_6_8_15), is(false));
- assertThat(V_7_0_0.before(V_6_8_15), is(false));
-
- assertThat(V_6_8_15.onOrBefore(V_7_0_0), is(true));
- assertThat(V_6_8_15.onOrBefore(V_6_8_15), is(true));
- assertThat(V_7_0_0.onOrBefore(V_6_8_15), is(false));
-
- assertThat(V_6_8_15.after(V_7_0_0), is(false));
- assertThat(V_6_8_15.after(V_6_8_15), is(false));
- assertThat(V_7_0_0.after(V_6_8_15), is(true));
-
- assertThat(V_6_8_15.onOrAfter(V_7_0_0), is(false));
- assertThat(V_6_8_15.onOrAfter(V_6_8_15), is(true));
- assertThat(V_7_0_0.onOrAfter(V_6_8_15), is(true));
-
- assertTrue(LegacyESVersion.fromString("5.0.0-alpha2").onOrAfter(LegacyESVersion.fromString("5.0.0-alpha1")));
- assertTrue(LegacyESVersion.fromString("5.0.0").onOrAfter(LegacyESVersion.fromString("5.0.0-beta2")));
- assertTrue(LegacyESVersion.fromString("5.0.0-rc1").onOrAfter(LegacyESVersion.fromString("5.0.0-beta24")));
- assertTrue(LegacyESVersion.fromString("5.0.0-alpha24").before(LegacyESVersion.fromString("5.0.0-beta0")));
-
- assertThat(V_6_8_15, is(lessThan(V_7_0_0)));
- assertThat(V_7_0_0, is(greaterThan(V_6_8_15)));
-
- // compare opensearch version to LegacyESVersion
- assertThat(Version.V_1_0_0.compareMajor(LegacyESVersion.V_7_0_0), is(0));
- assertThat(Version.V_1_0_0.compareMajor(LegacyESVersion.fromString("6.3.0")), is(1));
- assertThat(LegacyESVersion.fromString("6.3.0").compareMajor(Version.V_1_0_0), is(-1));
- }
-
- public void testMin() {
- assertEquals(VersionUtils.getPreviousVersion(), LegacyESVersion.min(Version.CURRENT, VersionUtils.getPreviousVersion()));
- assertEquals(LegacyESVersion.fromString("7.0.1"), LegacyESVersion.min(LegacyESVersion.fromString("7.0.1"), Version.CURRENT));
- Version legacyVersion = VersionUtils.randomLegacyVersion(random());
- Version opensearchVersion = VersionUtils.randomOpenSearchVersion(random());
- assertEquals(legacyVersion, Version.min(opensearchVersion, legacyVersion));
- }
-
- public void testMax() {
- assertEquals(Version.CURRENT, Version.max(Version.CURRENT, VersionUtils.randomLegacyVersion(random())));
- assertEquals(Version.CURRENT, Version.max(LegacyESVersion.fromString("1.0.1"), Version.CURRENT));
- Version legacyVersion = VersionUtils.randomOpenSearchVersion(random());
- Version opensearchVersion = VersionUtils.randomLegacyVersion(random());
- assertEquals(legacyVersion, Version.max(opensearchVersion, legacyVersion));
- }
-
- public void testMinimumIndexCompatibilityVersion() {
- assertEquals(LegacyESVersion.fromId(5000099), LegacyESVersion.fromId(6000026).minimumIndexCompatibilityVersion());
- assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5000099).minimumIndexCompatibilityVersion());
- assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5010000).minimumIndexCompatibilityVersion());
- assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5000001).minimumIndexCompatibilityVersion());
- }
-
- public void testVersionFromString() {
- final int iters = scaledRandomIntBetween(100, 1000);
- for (int i = 0; i < iters; i++) {
- LegacyESVersion version = randomLegacyVersion(random());
- assertThat(LegacyESVersion.fromString(version.toString()), sameInstance(version));
- }
- }
-
- public void testTooLongVersionFromString() {
- Exception e = expectThrows(IllegalArgumentException.class, () -> LegacyESVersion.fromString("1.0.0.1.3"));
- assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
- }
-
- public void testTooShortVersionFromString() {
- Exception e = expectThrows(IllegalArgumentException.class, () -> LegacyESVersion.fromString("1.0"));
- assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
- }
-
- public void testWrongVersionFromString() {
- Exception e = expectThrows(IllegalArgumentException.class, () -> LegacyESVersion.fromString("WRONG.VERSION"));
- assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
- }
-
- public void testVersionNoPresentInSettings() {
- Exception e = expectThrows(IllegalStateException.class, () -> LegacyESVersion.indexCreated(Settings.builder().build()));
- assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
- }
-
- public void testIndexCreatedVersion() {
- // an actual index has a IndexMetadata.SETTING_INDEX_UUID
- final LegacyESVersion version = (LegacyESVersion) LegacyESVersion.fromId(6000026);
- assertEquals(
- version,
- LegacyESVersion.indexCreated(
- Settings.builder().put(IndexMetadata.SETTING_INDEX_UUID, "foo").put(IndexMetadata.SETTING_VERSION_CREATED, version).build()
- )
- );
- }
-
- public void testMinCompatVersion() {
- Version major = LegacyESVersion.fromString("6.8.0");
- assertThat(LegacyESVersion.fromString("1.0.0").minimumCompatibilityVersion(), equalTo(major));
- assertThat(LegacyESVersion.fromString("1.2.0").minimumCompatibilityVersion(), equalTo(major));
- assertThat(LegacyESVersion.fromString("1.3.0").minimumCompatibilityVersion(), equalTo(major));
-
- Version major5x = LegacyESVersion.fromString("5.0.0");
- assertThat(LegacyESVersion.fromString("5.0.0").minimumCompatibilityVersion(), equalTo(major5x));
- assertThat(LegacyESVersion.fromString("5.2.0").minimumCompatibilityVersion(), equalTo(major5x));
- assertThat(LegacyESVersion.fromString("5.3.0").minimumCompatibilityVersion(), equalTo(major5x));
-
- Version major56x = LegacyESVersion.fromString("5.6.0");
- assertThat(LegacyESVersion.fromString("6.5.0").minimumCompatibilityVersion(), equalTo(major56x));
- assertThat(LegacyESVersion.fromString("6.3.1").minimumCompatibilityVersion(), equalTo(major56x));
-
- // from 7.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is
- // released since we need to bump the supported minor in Version#minimumCompatibilityVersion()
- Version lastVersion = LegacyESVersion.fromString("6.8.0"); // TODO: remove this once min compat version is a constant instead of
- // method
- assertEquals(lastVersion.major, LegacyESVersion.V_7_0_0.minimumCompatibilityVersion().major);
- assertEquals(
- "did you miss to bump the minor in Version#minimumCompatibilityVersion()",
- lastVersion.minor,
- LegacyESVersion.V_7_0_0.minimumCompatibilityVersion().minor
- );
- assertEquals(0, LegacyESVersion.V_7_0_0.minimumCompatibilityVersion().revision);
- }
-
- public void testToString() {
- // with 2.0.beta we lowercase
- assertEquals("2.0.0-beta1", LegacyESVersion.fromString("2.0.0-beta1").toString());
- assertEquals("5.0.0-alpha1", LegacyESVersion.fromId(5000001).toString());
- assertEquals("2.3.0", LegacyESVersion.fromString("2.3.0").toString());
- assertEquals("0.90.0.Beta1", LegacyESVersion.fromString("0.90.0.Beta1").toString());
- assertEquals("1.0.0.Beta1", LegacyESVersion.fromString("1.0.0.Beta1").toString());
- assertEquals("2.0.0-beta1", LegacyESVersion.fromString("2.0.0-beta1").toString());
- assertEquals("5.0.0-beta1", LegacyESVersion.fromString("5.0.0-beta1").toString());
- assertEquals("5.0.0-alpha1", LegacyESVersion.fromString("5.0.0-alpha1").toString());
- }
-
- public void testIsRc() {
- assertTrue(LegacyESVersion.fromString("2.0.0-rc1").isRC());
- assertTrue(LegacyESVersion.fromString("1.0.0.RC1").isRC());
-
- for (int i = 0; i < 25; i++) {
- assertEquals(LegacyESVersion.fromString("5.0.0-rc" + i).id, LegacyESVersion.fromId(5000000 + i + 50).id);
- assertEquals("5.0.0-rc" + i, LegacyESVersion.fromId(5000000 + i + 50).toString());
-
- // legacy RC versioning
- assertEquals(LegacyESVersion.fromString("1.0.0.RC" + i).id, LegacyESVersion.fromId(1000000 + i + 50).id);
- assertEquals("1.0.0.RC" + i, LegacyESVersion.fromId(1000000 + i + 50).toString());
- }
- }
-
- public void testIsBeta() {
- assertTrue(LegacyESVersion.fromString("2.0.0-beta1").isBeta());
- assertTrue(LegacyESVersion.fromString("1.0.0.Beta1").isBeta());
- assertTrue(LegacyESVersion.fromString("0.90.0.Beta1").isBeta());
-
- for (int i = 0; i < 25; i++) {
- assertEquals(LegacyESVersion.fromString("5.0.0-beta" + i).id, LegacyESVersion.fromId(5000000 + i + 25).id);
- assertEquals("5.0.0-beta" + i, LegacyESVersion.fromId(5000000 + i + 25).toString());
- }
- }
-
- public void testIsAlpha() {
- assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha());
- assertFalse(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha());
- assertTrue(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isBeta());
- assertTrue(LegacyESVersion.fromString("5.0.0-alpha14").isAlpha());
- assertEquals(5000014, LegacyESVersion.fromString("5.0.0-alpha14").id);
- assertTrue(LegacyESVersion.fromId(5000015).isAlpha());
-
- for (int i = 0; i < 25; i++) {
- assertEquals(LegacyESVersion.fromString("5.0.0-alpha" + i).id, LegacyESVersion.fromId(5000000 + i).id);
- assertEquals("5.0.0-alpha" + i, LegacyESVersion.fromId(5000000 + i).toString());
- }
- }
-
- public void testParseVersion() {
- final int iters = scaledRandomIntBetween(100, 1000);
- for (int i = 0; i < iters; i++) {
- LegacyESVersion version = randomLegacyVersion(random());
- LegacyESVersion parsedVersion = (LegacyESVersion) LegacyESVersion.fromString(version.toString());
- assertEquals(version, parsedVersion);
- }
-
- expectThrows(IllegalArgumentException.class, () -> { LegacyESVersion.fromString("5.0.0-alph2"); });
- assertEquals(LegacyESVersion.fromString("2.0.0-SNAPSHOT"), LegacyESVersion.fromId(2000099));
- expectThrows(IllegalArgumentException.class, () -> { LegacyESVersion.fromString("5.0.0-SNAPSHOT"); });
- }
-
- public void testAllVersionsMatchId() throws Exception {
- final Set<Version> releasedVersions = new HashSet<>(VersionUtils.allReleasedVersions());
- final Set<Version> unreleasedVersions = new HashSet<>(VersionUtils.allUnreleasedVersions());
- Map<String, Version> maxBranchVersions = new HashMap<>();
- for (java.lang.reflect.Field field : Version.class.getFields()) {
- if (field.getName().matches("_ID")) {
- assertTrue(field.getName() + " should be static", Modifier.isStatic(field.getModifiers()));
- assertTrue(field.getName() + " should be final", Modifier.isFinal(field.getModifiers()));
- int versionId = (Integer) field.get(Version.class);
-
- String constantName = field.getName().substring(0, field.getName().indexOf("_ID"));
- java.lang.reflect.Field versionConstant = Version.class.getField(constantName);
- assertTrue(constantName + " should be static", Modifier.isStatic(versionConstant.getModifiers()));
- assertTrue(constantName + " should be final", Modifier.isFinal(versionConstant.getModifiers()));
-
- Version v = (Version) versionConstant.get(null);
- logger.debug("Checking {}", v);
- if (field.getName().endsWith("_UNRELEASED")) {
- assertTrue(unreleasedVersions.contains(v));
- } else {
- assertTrue(releasedVersions.contains(v));
- }
- assertEquals("Version id " + field.getName() + " does not point to " + constantName, v, Version.fromId(versionId));
- assertEquals("Version " + constantName + " does not have correct id", versionId, v.id);
- if (v.major >= 2) {
- String number = v.toString();
- if (v.isBeta()) {
- number = number.replace("-beta", "_beta");
- } else if (v.isRC()) {
- number = number.replace("-rc", "_rc");
- } else if (v.isAlpha()) {
- number = number.replace("-alpha", "_alpha");
- }
- assertEquals("V_" + number.replace('.', '_'), constantName);
- } else {
- assertEquals("V_" + v.toString().replace('.', '_'), constantName);
- }
-
- // only the latest version for a branch should be a snapshot (ie unreleased)
- String branchName = "" + v.major + "." + v.minor;
- Version maxBranchVersion = maxBranchVersions.get(branchName);
- if (maxBranchVersion == null) {
- maxBranchVersions.put(branchName, v);
- } else if (v.after(maxBranchVersion)) {
- if (v == Version.CURRENT) {
- // Current is weird - it counts as released even though it shouldn't.
- continue;
- }
- assertFalse(
- "Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists",
- VersionUtils.allUnreleasedVersions().contains(maxBranchVersion)
- );
- maxBranchVersions.put(branchName, v);
- }
- }
- }
- }
-
- public void testIsCompatible() {
- assertTrue(isCompatible(LegacyESVersion.fromString("6.8.0"), LegacyESVersion.V_7_0_0));
- assertFalse(isCompatible(LegacyESVersion.fromString("6.6.0"), LegacyESVersion.V_7_0_0));
- assertFalse(isCompatible(LegacyESVersion.fromString("6.7.0"), LegacyESVersion.V_7_0_0));
-
- assertFalse(isCompatible(LegacyESVersion.fromId(5000099), LegacyESVersion.fromString("6.0.0")));
- assertFalse(isCompatible(LegacyESVersion.fromId(5000099), LegacyESVersion.fromString("7.0.0")));
-
- Version a = randomLegacyVersion(random());
- Version b = randomLegacyVersion(random());
- assertThat(a.isCompatible(b), equalTo(b.isCompatible(a)));
- }
-}
diff --git a/server/src/test/java/org/opensearch/VersionTests.java b/server/src/test/java/org/opensearch/VersionTests.java
index beff71eceab0d..5b3213ded1c02 100644
--- a/server/src/test/java/org/opensearch/VersionTests.java
+++ b/server/src/test/java/org/opensearch/VersionTests.java
@@ -213,11 +213,11 @@ public void testOpenSearchMinCompatVersion() {
int opensearchMajor = opensearchVersion.major;
int major = opensearchMajor - 1;
if (opensearchMajor == 1) {
- major = 7;
+ major = 6;
} else if (opensearchMajor == 2) {
- major = 8;
+ major = 7;
}
- assertEquals(VersionUtils.lastFirstReleasedMinorFromMajor(candidates, major - 1), opensearchVersion.minimumCompatibilityVersion());
+ assertEquals(VersionUtils.lastFirstReleasedMinorFromMajor(candidates, major), opensearchVersion.minimumCompatibilityVersion());
}
/** test opensearch min index compatibility */
@@ -230,14 +230,7 @@ public void testOpenSearchMinIndexCompatVersion() {
// opensearch 3.x minCompat is 1.{last minor version}.0
// until 3.0 is staged the following line will only return legacy versions
List<Version> candidates = opensearchVersion.major >= 3 ? VersionUtils.allOpenSearchVersions() : VersionUtils.allLegacyVersions();
- int opensearchMajor = opensearchVersion.major;
- int major = opensearchMajor - 1;
- if (opensearchMajor == 1) {
- major = 7;
- } else if (opensearchMajor == 2) {
- major = 8;
- }
- Version expected = VersionUtils.getFirstVersionOfMajor(candidates, major - 1);
+ Version expected = VersionUtils.getFirstVersionOfMajor(candidates, opensearchVersion.major - 1);
Version actual = opensearchVersion.minimumIndexCompatibilityVersion();
// since some legacy versions still support build (alpha, beta, RC) we check major minor revision only
assertEquals(expected.major, actual.major);
@@ -245,6 +238,26 @@ public void testOpenSearchMinIndexCompatVersion() {
assertEquals(expected.revision, actual.revision);
}
+ /** test first version of opensearch compatibility that does not support legacy versions */
+ public void testOpenSearchPreLegacyRemoval() {
+ Version opensearchVersion = Version.fromString("3.0.0");
+ int opensearchMajor = opensearchVersion.major;
+ List<Version> candidates = VersionUtils.allOpenSearchVersions();
+ Version expectedMinIndexCompat = VersionUtils.getFirstVersionOfMajor(candidates, opensearchMajor - 1);
+ Version actualMinIndexCompat = opensearchVersion.minimumIndexCompatibilityVersion();
+
+ Version expectedMinCompat = VersionUtils.lastFirstReleasedMinorFromMajor(VersionUtils.allOpenSearchVersions(), opensearchMajor - 1);
+ Version actualMinCompat = opensearchVersion.minimumCompatibilityVersion();
+ // since some legacy versions still support build (alpha, beta, RC) we check major minor revision only
+ assertEquals(expectedMinIndexCompat.major, actualMinIndexCompat.major);
+ assertEquals(expectedMinIndexCompat.minor, actualMinIndexCompat.minor);
+ assertEquals(expectedMinIndexCompat.revision, actualMinIndexCompat.revision);
+
+ assertEquals(expectedMinCompat.major, actualMinCompat.major);
+ assertEquals(expectedMinCompat.minor, actualMinCompat.minor);
+ assertEquals(expectedMinCompat.revision, actualMinCompat.revision);
+ }
+
public void testToString() {
assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString());
assertEquals("5.0.0-alpha1", Version.fromId(5000001).toString());
@@ -413,7 +426,7 @@ public void testIsCompatible() {
} else {
currentOrNextMajorVersion = currentMajorVersion;
}
- final Version lastMinorFromPreviousMajor = VersionUtils.allReleasedVersions()
+ final Version lastMinorFromPreviousMajor = VersionUtils.allOpenSearchVersions()
.stream()
.filter(v -> v.major == (currentOrNextMajorVersion.major == 1 ? 7 : currentOrNextMajorVersion.major - 1))
.max(Version::compareTo)
diff --git a/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java b/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java
new file mode 100644
index 0000000000000..86529d96573f8
--- /dev/null
+++ b/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java
@@ -0,0 +1,653 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.action;
+
+import org.junit.After;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.action.support.master.MasterNodeRequest;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.bytes.BytesArray;
+import org.opensearch.common.logging.DeprecationLogger;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsFilter;
+import org.opensearch.common.xcontent.NamedXContentRegistry;
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.rest.BaseRestHandler;
+import org.opensearch.rest.action.admin.cluster.RestClusterGetSettingsAction;
+import org.opensearch.rest.action.admin.cluster.RestClusterHealthAction;
+import org.opensearch.rest.action.admin.cluster.RestClusterRerouteAction;
+import org.opensearch.rest.action.admin.cluster.RestClusterStateAction;
+import org.opensearch.rest.action.admin.cluster.RestClusterUpdateSettingsAction;
+import org.opensearch.rest.action.admin.cluster.dangling.RestDeleteDanglingIndexAction;
+import org.opensearch.rest.action.admin.cluster.dangling.RestImportDanglingIndexAction;
+import org.opensearch.rest.action.admin.indices.RestAddIndexBlockAction;
+import org.opensearch.rest.action.admin.indices.RestCloseIndexAction;
+import org.opensearch.rest.action.admin.indices.RestCreateIndexAction;
+import org.opensearch.rest.action.admin.indices.RestDeleteIndexAction;
+import org.opensearch.rest.action.admin.indices.RestGetIndicesAction;
+import org.opensearch.rest.action.admin.indices.RestGetMappingAction;
+import org.opensearch.rest.action.admin.indices.RestGetSettingsAction;
+import org.opensearch.rest.action.admin.indices.RestIndexDeleteAliasesAction;
+import org.opensearch.rest.action.admin.indices.RestIndexPutAliasAction;
+import org.opensearch.rest.action.admin.indices.RestIndicesAliasesAction;
+import org.opensearch.rest.action.admin.indices.RestOpenIndexAction;
+import org.opensearch.rest.action.admin.indices.RestPutMappingAction;
+import org.opensearch.rest.action.admin.indices.RestResizeHandler;
+import org.opensearch.rest.action.admin.indices.RestRolloverIndexAction;
+import org.opensearch.rest.action.admin.indices.RestUpdateSettingsAction;
+import org.opensearch.rest.action.admin.indices.RestDeleteComponentTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestDeleteComposableIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestDeleteIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestGetComponentTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestGetComposableIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestGetIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestPutComponentTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestPutComposableIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestPutIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestSimulateIndexTemplateAction;
+import org.opensearch.rest.action.admin.indices.RestSimulateTemplateAction;
+import org.opensearch.rest.action.admin.cluster.RestCleanupRepositoryAction;
+import org.opensearch.rest.action.admin.cluster.RestCloneSnapshotAction;
+import org.opensearch.rest.action.admin.cluster.RestCreateSnapshotAction;
+import org.opensearch.rest.action.admin.cluster.RestDeleteRepositoryAction;
+import org.opensearch.rest.action.admin.cluster.RestDeleteSnapshotAction;
+import org.opensearch.rest.action.admin.cluster.RestGetRepositoriesAction;
+import org.opensearch.rest.action.admin.cluster.RestGetSnapshotsAction;
+import org.opensearch.rest.action.admin.cluster.RestPutRepositoryAction;
+import org.opensearch.rest.action.admin.cluster.RestRestoreSnapshotAction;
+import org.opensearch.rest.action.admin.cluster.RestSnapshotsStatusAction;
+import org.opensearch.rest.action.admin.cluster.RestVerifyRepositoryAction;
+import org.opensearch.rest.action.cat.RestAllocationAction;
+import org.opensearch.rest.action.cat.RestRepositoriesAction;
+import org.opensearch.rest.action.cat.RestThreadPoolAction;
+import org.opensearch.rest.action.cat.RestMasterAction;
+import org.opensearch.rest.action.cat.RestShardsAction;
+import org.opensearch.rest.action.cat.RestPluginsAction;
+import org.opensearch.rest.action.cat.RestNodeAttrsAction;
+import org.opensearch.rest.action.cat.RestNodesAction;
+import org.opensearch.rest.action.cat.RestIndicesAction;
+import org.opensearch.rest.action.cat.RestTemplatesAction;
+import org.opensearch.rest.action.cat.RestPendingClusterTasksAction;
+import org.opensearch.rest.action.cat.RestSegmentsAction;
+import org.opensearch.rest.action.cat.RestSnapshotAction;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.test.rest.FakeRestRequest;
+import org.opensearch.threadpool.TestThreadPool;
+
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_READ_ONLY_SETTING;
+
+/**
+ * As of 2.0, the request parameter 'master_timeout' in all applicable REST APIs is deprecated,
+ * and alternative parameter 'cluster_manager_timeout' is added.
+ * These tests validate the behavior of the renamed request parameter.
+ * Remove these tests after removing MASTER_ROLE and 'master_timeout'.
+ */
+public class RenamedTimeoutRequestParameterTests extends OpenSearchTestCase {
+ private final TestThreadPool threadPool = new TestThreadPool(RenamedTimeoutRequestParameterTests.class.getName());
+ private final NodeClient client = new NodeClient(Settings.EMPTY, threadPool);
+ private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RenamedTimeoutRequestParameterTests.class);
+
+ private static final String DUPLICATE_PARAMETER_ERROR_MESSAGE =
+ "Please only use one of the request parameters [master_timeout, cluster_manager_timeout].";
+ private static final String MASTER_TIMEOUT_DEPRECATED_MESSAGE =
+ "Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.";
+
+ @After
+ public void terminateThreadPool() {
+ terminate(threadPool);
+ }
+
+ public void testNoWarningsForNewParam() {
+ BaseRestHandler.parseDeprecatedMasterTimeoutParameter(
+ getMasterNodeRequest(),
+ getRestRequestWithNewParam(),
+ deprecationLogger,
+ "test"
+ );
+ }
+
+ public void testDeprecationWarningForOldParam() {
+ BaseRestHandler.parseDeprecatedMasterTimeoutParameter(
+ getMasterNodeRequest(),
+ getRestRequestWithDeprecatedParam(),
+ deprecationLogger,
+ "test"
+ );
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testBothParamsNotValid() {
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> BaseRestHandler.parseDeprecatedMasterTimeoutParameter(
+ getMasterNodeRequest(),
+ getRestRequestWithBothParams(),
+ deprecationLogger,
+ "test"
+ )
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatAllocation() {
+ RestAllocationAction action = new RestAllocationAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatIndices() {
+ RestIndicesAction action = new RestIndicesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatClusterManager() {
+ RestMasterAction action = new RestMasterAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatNodeattrs() {
+ RestNodeAttrsAction action = new RestNodeAttrsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatNodes() {
+ RestNodesAction action = new RestNodesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatPendingTasks() {
+ RestPendingClusterTasksAction action = new RestPendingClusterTasksAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatPlugins() {
+ RestPluginsAction action = new RestPluginsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatRepositories() {
+ RestRepositoriesAction action = new RestRepositoriesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatShards() {
+ RestShardsAction action = new RestShardsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatSnapshots() {
+ RestSnapshotAction action = new RestSnapshotAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatTemplates() {
+ RestTemplatesAction action = new RestTemplatesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatThreadPool() {
+ RestThreadPoolAction action = new RestThreadPoolAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCatSegments() {
+ RestSegmentsAction action = new RestSegmentsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterHealth() {
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> RestClusterHealthAction.fromRequest(getRestRequestWithBodyWithBothParams())
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterReroute() throws IOException {
+ final SettingsFilter filter = new SettingsFilter(Collections.singleton("foo.filtered"));
+ RestClusterRerouteAction action = new RestClusterRerouteAction(filter);
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterState() throws IOException {
+ final SettingsFilter filter = new SettingsFilter(Collections.singleton("foo.filtered"));
+ RestClusterStateAction action = new RestClusterStateAction(filter);
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterGetSettings() throws IOException {
+ final SettingsFilter filter = new SettingsFilter(Collections.singleton("foo.filtered"));
+ RestClusterGetSettingsAction action = new RestClusterGetSettingsAction(null, null, filter);
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterUpdateSettings() throws IOException {
+ RestClusterUpdateSettingsAction action = new RestClusterUpdateSettingsAction();
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testClusterPendingTasks() {
+ RestPendingClusterTasksAction action = new RestPendingClusterTasksAction();
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testAddIndexBlock() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "1h");
+ request.params().put("master_timeout", "3s");
+ request.params().put("block", "metadata");
+ NodeClient client = new NodeClient(Settings.builder().put(INDEX_READ_ONLY_SETTING.getKey(), Boolean.FALSE).build(), threadPool);
+ RestAddIndexBlockAction action = new RestAddIndexBlockAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCloseIndex() {
+ RestCloseIndexAction action = new RestCloseIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCreateIndex() {
+ RestCreateIndexAction action = new RestCreateIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteIndex() {
+ RestDeleteIndexAction action = new RestDeleteIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetIndices() {
+ RestGetIndicesAction action = new RestGetIndicesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetMapping() {
+ RestGetMappingAction action = new RestGetMappingAction(threadPool);
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetSettings() {
+ RestGetSettingsAction action = new RestGetSettingsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testIndexDeleteAliases() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "1h");
+ request.params().put("master_timeout", "3s");
+ request.params().put("name", "*");
+ request.params().put("index", "test");
+ RestIndexDeleteAliasesAction action = new RestIndexDeleteAliasesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testIndexPutAlias() {
+ RestIndexPutAliasAction action = new RestIndexPutAliasAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testIndicesAliases() {
+ RestIndicesAliasesAction action = new RestIndicesAliasesAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testOpenIndex() {
+ RestOpenIndexAction action = new RestOpenIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testPutMapping() {
+ RestPutMappingAction action = new RestPutMappingAction();
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testShrinkIndex() {
+ RestResizeHandler.RestShrinkIndexAction action = new RestResizeHandler.RestShrinkIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testSplitIndex() {
+ RestResizeHandler.RestSplitIndexAction action = new RestResizeHandler.RestSplitIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCloneIndex() {
+ RestResizeHandler.RestCloneIndexAction action = new RestResizeHandler.RestCloneIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testRolloverIndex() {
+ RestRolloverIndexAction action = new RestRolloverIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testUpdateSettings() {
+ RestUpdateSettingsAction action = new RestUpdateSettingsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteDanglingIndex() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "1h");
+ request.params().put("master_timeout", "3s");
+ request.params().put("index_uuid", "test");
+ RestDeleteDanglingIndexAction action = new RestDeleteDanglingIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testImportDanglingIndex() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "1h");
+ request.params().put("master_timeout", "3s");
+ request.params().put("index_uuid", "test");
+ RestImportDanglingIndexAction action = new RestImportDanglingIndexAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteComponentTemplate() {
+ RestDeleteComponentTemplateAction action = new RestDeleteComponentTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteComposableIndexTemplate() {
+ RestDeleteComposableIndexTemplateAction action = new RestDeleteComposableIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteIndexTemplate() {
+ RestDeleteIndexTemplateAction action = new RestDeleteIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetComponentTemplate() {
+ RestGetComponentTemplateAction action = new RestGetComponentTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetComposableIndexTemplate() {
+ RestGetComposableIndexTemplateAction action = new RestGetComposableIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetIndexTemplate() {
+ RestGetIndexTemplateAction action = new RestGetIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testPutComponentTemplate() {
+ RestPutComponentTemplateAction action = new RestPutComponentTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testPutComposableIndexTemplate() {
+ RestPutComposableIndexTemplateAction action = new RestPutComposableIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testPutIndexTemplate() {
+ RestPutIndexTemplateAction action = new RestPutIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testSimulateIndexTemplate() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", randomFrom("1h", "2m"));
+ request.params().put("master_timeout", "3s");
+ request.params().put("name", "test");
+ RestSimulateIndexTemplateAction action = new RestSimulateIndexTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testSimulateTemplate() {
+ RestSimulateTemplateAction action = new RestSimulateTemplateAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCleanupRepository() {
+ RestCleanupRepositoryAction action = new RestCleanupRepositoryAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCloneSnapshot() {
+ RestCloneSnapshotAction action = new RestCloneSnapshotAction();
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testCreateSnapshot() {
+ RestCreateSnapshotAction action = new RestCreateSnapshotAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteRepository() {
+ RestDeleteRepositoryAction action = new RestDeleteRepositoryAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testDeleteSnapshot() {
+ RestDeleteSnapshotAction action = new RestDeleteSnapshotAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetRepositories() {
+ final SettingsFilter filter = new SettingsFilter(Collections.singleton("foo.filtered"));
+ RestGetRepositoriesAction action = new RestGetRepositoriesAction(filter);
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testGetSnapshots() {
+ RestGetSnapshotsAction action = new RestGetSnapshotsAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testPutRepository() {
+ RestPutRepositoryAction action = new RestPutRepositoryAction();
+ Exception e = assertThrows(
+ OpenSearchParseException.class,
+ () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client)
+ );
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testRestoreSnapshot() {
+ RestRestoreSnapshotAction action = new RestRestoreSnapshotAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testSnapshotsStatus() {
+ RestSnapshotsStatusAction action = new RestSnapshotsStatusAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
+ public void testVerifyRepository() {
+ RestVerifyRepositoryAction action = new RestVerifyRepositoryAction();
+ Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client));
+ assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE));
+ assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE);
+ }
+
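+ // Minimal MasterNodeRequest stub with no-op validation.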
+ private MasterNodeRequest getMasterNodeRequest() {
+ return new MasterNodeRequest() {
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+ };
+ }
+
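+ // The helpers below build FakeRestRequest instances carrying the deprecated master_timeout
+ // parameter, the new cluster_manager_timeout parameter, or both at once.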
+ private FakeRestRequest getRestRequestWithBothParams() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "1h");
+ request.params().put("master_timeout", "3s");
+ return request;
+ }
+
+ private FakeRestRequest getRestRequestWithDeprecatedParam() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("master_timeout", "3s");
+ return request;
+ }
+
+ private FakeRestRequest getRestRequestWithNewParam() {
+ FakeRestRequest request = new FakeRestRequest();
+ request.params().put("cluster_manager_timeout", "2m");
+ return request;
+ }
+
+ private FakeRestRequest getRestRequestWithBodyWithBothParams() {
+ FakeRestRequest request = getFakeRestRequestWithBody();
+ request.params().put("cluster_manager_timeout", "2m");
+ request.params().put("master_timeout", "3s");
+ return request;
+ }
+
+ private FakeRestRequest getFakeRestRequestWithBody() {
+ return new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(new BytesArray("{}"), XContentType.JSON).build();
+ }
+}
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java
index 8da65ba13b9cb..a92e4e4a6c536 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java
@@ -163,7 +163,7 @@ public void testResolve() {
IllegalArgumentException.class,
() -> makeRequestWithNodeDescriptions("not-a-node").resolveVotingConfigExclusions(clusterState)
).getMessage(),
- equalTo("add voting config exclusions request for [not-a-node] matched no master-eligible nodes")
+ equalTo("add voting config exclusions request for [not-a-node] matched no cluster-manager-eligible nodes")
);
assertWarnings(AddVotingConfigExclusionsRequest.DEPRECATION_MESSAGE);
}
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
index a570db040a805..bff0689a153b3 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java
@@ -344,7 +344,7 @@ public void testReturnsErrorIfNoMatchingNodeDescriptions() throws InterruptedExc
assertThat(rootCause, instanceOf(IllegalArgumentException.class));
assertThat(
rootCause.getMessage(),
- equalTo("add voting config exclusions request for [not-a-node] matched no master-eligible nodes")
+ equalTo("add voting config exclusions request for [not-a-node] matched no cluster-manager-eligible nodes")
);
assertWarnings(AddVotingConfigExclusionsRequest.DEPRECATION_MESSAGE);
}
@@ -368,7 +368,7 @@ public void testOnlyMatchesMasterEligibleNodes() throws InterruptedException {
assertThat(rootCause, instanceOf(IllegalArgumentException.class));
assertThat(
rootCause.getMessage(),
- equalTo("add voting config exclusions request for [_all, master:false] matched no master-eligible nodes")
+ equalTo("add voting config exclusions request for [_all, master:false] matched no cluster-manager-eligible nodes")
);
assertWarnings(AddVotingConfigExclusionsRequest.DEPRECATION_MESSAGE);
}
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthRequestTests.java
index c84279a0782c3..2576823578630 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthRequestTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthRequestTests.java
@@ -32,7 +32,6 @@
package org.opensearch.action.admin.cluster.health;
-import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.cluster.health.ClusterHealthStatus;
import org.opensearch.common.Priority;
@@ -40,12 +39,9 @@
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.VersionUtils;
import java.util.Locale;
-import static org.opensearch.test.VersionUtils.getPreviousVersion;
-import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.core.IsEqual.equalTo;
public class ClusterHealthRequestTests extends OpenSearchTestCase {
@@ -74,84 +70,6 @@ public void testRequestReturnsHiddenIndicesByDefault() {
assertTrue(defaultRequest.indicesOptions().expandWildcardsHidden());
}
- public void testBwcSerialization() throws Exception {
- for (int runs = 0; runs < randomIntBetween(5, 20); runs++) {
- // Generate a random cluster health request in version < 7.2.0 and serializes it
- final BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(randomVersionBetween(random(), VersionUtils.getFirstVersion(), getPreviousVersion(LegacyESVersion.V_7_2_0)));
-
- final ClusterHealthRequest expected = randomRequest();
- {
- expected.getParentTask().writeTo(out);
- out.writeTimeValue(expected.masterNodeTimeout());
- out.writeBoolean(expected.local());
- if (expected.indices() == null) {
- out.writeVInt(0);
- } else {
- out.writeVInt(expected.indices().length);
- for (String index : expected.indices()) {
- out.writeString(index);
- }
- }
- out.writeTimeValue(expected.timeout());
- if (expected.waitForStatus() == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- out.writeByte(expected.waitForStatus().value());
- }
- out.writeBoolean(expected.waitForNoRelocatingShards());
- expected.waitForActiveShards().writeTo(out);
- out.writeString(expected.waitForNodes());
- if (expected.waitForEvents() == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- Priority.writeTo(expected.waitForEvents(), out);
- }
- out.writeBoolean(expected.waitForNoInitializingShards());
- }
-
- // Deserialize and check the cluster health request
- final StreamInput in = out.bytes().streamInput();
- in.setVersion(out.getVersion());
- final ClusterHealthRequest actual = new ClusterHealthRequest(in);
-
- assertThat(actual.waitForStatus(), equalTo(expected.waitForStatus()));
- assertThat(actual.waitForNodes(), equalTo(expected.waitForNodes()));
- assertThat(actual.waitForNoInitializingShards(), equalTo(expected.waitForNoInitializingShards()));
- assertThat(actual.waitForNoRelocatingShards(), equalTo(expected.waitForNoRelocatingShards()));
- assertThat(actual.waitForActiveShards(), equalTo(expected.waitForActiveShards()));
- assertThat(actual.waitForEvents(), equalTo(expected.waitForEvents()));
- assertIndicesEquals(actual.indices(), expected.indices());
- assertThat(actual.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen()));
- }
-
- for (int runs = 0; runs < randomIntBetween(5, 20); runs++) {
- // Generate a random cluster health request in current version
- final ClusterHealthRequest expected = randomRequest();
-
- // Serialize to node in version < 7.2.0
- final BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(randomVersionBetween(random(), VersionUtils.getFirstVersion(), getPreviousVersion(LegacyESVersion.V_7_2_0)));
- expected.writeTo(out);
-
- // Deserialize and check the cluster health request
- final StreamInput in = out.bytes().streamInput();
- in.setVersion(out.getVersion());
- final ClusterHealthRequest actual = new ClusterHealthRequest(in);
-
- assertThat(actual.waitForStatus(), equalTo(expected.waitForStatus()));
- assertThat(actual.waitForNodes(), equalTo(expected.waitForNodes()));
- assertThat(actual.waitForNoInitializingShards(), equalTo(expected.waitForNoInitializingShards()));
- assertThat(actual.waitForNoRelocatingShards(), equalTo(expected.waitForNoRelocatingShards()));
- assertThat(actual.waitForActiveShards(), equalTo(expected.waitForActiveShards()));
- assertThat(actual.waitForEvents(), equalTo(expected.waitForEvents()));
- assertIndicesEquals(actual.indices(), expected.indices());
- assertThat(actual.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen()));
- }
- }
-
private ClusterHealthRequest randomRequest() {
ClusterHealthRequest request = new ClusterHealthRequest();
request.waitForStatus(randomFrom(ClusterHealthStatus.values()));
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
index decad9d6f840e..3db20fd3404a7 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
@@ -32,7 +32,6 @@
package org.opensearch.action.admin.cluster.health;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.ClusterState;
@@ -58,7 +57,6 @@
import org.opensearch.test.OpenSearchTestCase;
import org.hamcrest.Matchers;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collections;
@@ -149,69 +147,6 @@ private void assertClusterHealth(ClusterHealthResponse clusterHealth) {
assertThat(clusterHealth.hasDiscoveredMaster(), Matchers.equalTo(clusterStateHealth.hasDiscoveredMaster()));
}
- public void testVersionCompatibleSerialization() throws IOException {
- boolean hasDiscoveredMaster = false;
- int indicesSize = randomInt(20);
- Map<String, ClusterIndexHealth> indices = new HashMap<>(indicesSize);
- if ("indices".equals(level) || "shards".equals(level)) {
- for (int i = 0; i < indicesSize; i++) {
- String indexName = randomAlphaOfLengthBetween(1, 5) + i;
- indices.put(indexName, ClusterIndexHealthTests.randomIndexHealth(indexName, level));
- }
- }
- ClusterStateHealth stateHealth = new ClusterStateHealth(
- randomInt(100),
- randomInt(100),
- randomInt(100),
- randomInt(100),
- randomInt(100),
- randomInt(100),
- randomInt(100),
- hasDiscoveredMaster,
- randomDoubleBetween(0d, 100d, true),
- randomFrom(ClusterHealthStatus.values()),
- indices
- );
- // Create the Cluster Health Response object with discovered master as false,
- // to verify serialization puts default value for the field
- ClusterHealthResponse clusterHealth = new ClusterHealthResponse(
- "test-cluster",
- randomInt(100),
- randomInt(100),
- randomInt(100),
- TimeValue.timeValueMillis(randomInt(10000)),
- randomBoolean(),
- stateHealth
- );
-
- BytesStreamOutput out_lt_1_0 = new BytesStreamOutput();
- Version old_version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_8_0);
- out_lt_1_0.setVersion(old_version);
- clusterHealth.writeTo(out_lt_1_0);
-
- BytesStreamOutput out_gt_1_0 = new BytesStreamOutput();
- Version new_version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
- out_gt_1_0.setVersion(new_version);
- clusterHealth.writeTo(out_gt_1_0);
-
- // The serialized output byte stream will not be same; and different by a boolean field "discovered_master"
- assertNotEquals(out_lt_1_0.size(), out_gt_1_0.size());
- assertThat(out_gt_1_0.size() - out_lt_1_0.size(), Matchers.equalTo(1));
-
- // Input stream constructed from Version 6_8 or less will not have field "discovered_master";
- // hence fallback to default as no value retained
- StreamInput in_lt_6_8 = out_lt_1_0.bytes().streamInput();
- in_lt_6_8.setVersion(old_version);
- clusterHealth = ClusterHealthResponse.readResponseFrom(in_lt_6_8);
- assertThat(clusterHealth.hasDiscoveredMaster(), Matchers.equalTo(true));
-
- // Input stream constructed from Version 7_0 and above will have field "discovered_master"; hence value will be retained
- StreamInput in_gt_7_0 = out_gt_1_0.bytes().streamInput();
- in_gt_7_0.setVersion(new_version);
- clusterHealth = ClusterHealthResponse.readResponseFrom(in_gt_7_0);
- assertThat(clusterHealth.hasDiscoveredMaster(), Matchers.equalTo(hasDiscoveredMaster));
- }
-
ClusterHealthResponse maybeSerialize(ClusterHealthResponse clusterHealth) throws IOException {
if (randomBoolean()) {
BytesStreamOutput out = new BytesStreamOutput();
@@ -228,7 +163,7 @@ public void testParseFromXContentWithDiscoveredMasterField() throws IOException
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
"{\"cluster_name\":\"535799904437:7-1-3-node\",\"status\":\"green\","
- + "\"timed_out\":false,\"number_of_nodes\":6,\"number_of_data_nodes\":3,\"discovered_master\":false,"
+ + "\"timed_out\":false,\"number_of_nodes\":6,\"number_of_data_nodes\":3,\"discovered_master\":true,"
+ "\"active_primary_shards\":4,\"active_shards\":5,\"relocating_shards\":0,\"initializing_shards\":0,"
+ "\"unassigned_shards\":0,\"delayed_unassigned_shards\":0,\"number_of_pending_tasks\":0,"
+ "\"number_of_in_flight_fetch\":0,\"task_max_waiting_in_queue_millis\":0,"
@@ -236,6 +171,27 @@ public void testParseFromXContentWithDiscoveredMasterField() throws IOException
)
) {
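+ // discovered_master is present in the payload, so the parsed value should be retained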
+ ClusterHealthResponse clusterHealth = ClusterHealthResponse.fromXContent(parser);
+ assertNotNull(clusterHealth);
+ assertThat(clusterHealth.getClusterName(), Matchers.equalTo("535799904437:7-1-3-node"));
+ assertThat(clusterHealth.getNumberOfNodes(), Matchers.equalTo(6));
+ assertThat(clusterHealth.hasDiscoveredMaster(), Matchers.equalTo(true));
+ }
+ }
+
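+ // When the payload omits discovered_master, parsing should still succeed with the remaining fields.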
+ public void testParseFromXContentWithoutDiscoveredMasterField() throws IOException {
+ try (
+ XContentParser parser = JsonXContent.jsonXContent.createParser(
+ NamedXContentRegistry.EMPTY,
+ DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ "{\"cluster_name\":\"535799904437:7-1-3-node\",\"status\":\"green\","
+ + "\"timed_out\":false,\"number_of_nodes\":6,\"number_of_data_nodes\":3,"
+ + "\"active_primary_shards\":4,\"active_shards\":5,\"relocating_shards\":0,\"initializing_shards\":0,"
+ + "\"unassigned_shards\":0,\"delayed_unassigned_shards\":0,\"number_of_pending_tasks\":0,"
+ + "\"number_of_in_flight_fetch\":0,\"task_max_waiting_in_queue_millis\":0,"
+ + "\"active_shards_percent_as_number\":100}"
+ )
+ ) {
ClusterHealthResponse clusterHealth = ClusterHealthResponse.fromXContent(parser);
assertNotNull(clusterHealth);
assertThat(clusterHealth.getClusterName(), Matchers.equalTo("535799904437:7-1-3-node"));
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskTests.java
index 5f8d5992c9f2f..45db94577f15f 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskTests.java
@@ -31,16 +31,23 @@
package org.opensearch.action.admin.cluster.node.tasks;
+import org.opensearch.action.search.SearchAction;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.xcontent.XContentHelper;
+import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskId;
import org.opensearch.tasks.TaskInfo;
+import org.opensearch.tasks.ResourceUsageMetric;
+import org.opensearch.tasks.ResourceStats;
+import org.opensearch.tasks.ResourceStatsType;
import org.opensearch.test.OpenSearchTestCase;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;
+import static org.opensearch.tasks.TaskInfoTests.randomResourceStats;
+
public class TaskTests extends OpenSearchTestCase {
public void testTaskInfoToString() {
@@ -61,7 +68,8 @@ public void testTaskInfoToString() {
cancellable,
cancelled,
TaskId.EMPTY_TASK_ID,
- Collections.singletonMap("foo", "bar")
+ Collections.singletonMap("foo", "bar"),
+ randomResourceStats(randomBoolean())
);
String taskInfoString = taskInfo.toString();
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(taskInfoString.getBytes(StandardCharsets.UTF_8)), true).v2();
@@ -94,7 +102,8 @@ public void testCancellableOptionWhenCancelledTrue() {
cancellable,
cancelled,
TaskId.EMPTY_TASK_ID,
- Collections.singletonMap("foo", "bar")
+ Collections.singletonMap("foo", "bar"),
+ randomResourceStats(randomBoolean())
);
String taskInfoString = taskInfo.toString();
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(taskInfoString.getBytes(StandardCharsets.UTF_8)), true).v2();
@@ -120,7 +129,8 @@ public void testCancellableOptionWhenCancelledFalse() {
cancellable,
cancelled,
TaskId.EMPTY_TASK_ID,
- Collections.singletonMap("foo", "bar")
+ Collections.singletonMap("foo", "bar"),
+ randomResourceStats(randomBoolean())
);
String taskInfoString = taskInfo.toString();
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(taskInfoString.getBytes(StandardCharsets.UTF_8)), true).v2();
@@ -148,9 +158,75 @@ public void testNonCancellableOption() {
cancellable,
cancelled,
TaskId.EMPTY_TASK_ID,
- Collections.singletonMap("foo", "bar")
+ Collections.singletonMap("foo", "bar"),
+ randomResourceStats(randomBoolean())
)
);
assertEquals(e.getMessage(), "task cannot be cancelled");
}
+
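+ // Verifies per-thread resource tracking on a Task: totals aggregate usage across threads, and
+ // invalid start/update sequences fail with IllegalStateException.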
+ public void testTaskResourceStats() {
+ final Task task = new Task(
+ randomLong(),
+ "transport",
+ SearchAction.NAME,
+ "description",
+ new TaskId(randomLong() + ":" + randomLong()),
+ Collections.emptyMap()
+ );
+
+ long totalMemory = 0L;
+ long totalCPU = 0L;
+
+ // reporting resource consumption events and checking total consumption values
+ for (int i = 0; i < randomInt(10); i++) {
+ long initialMemory = randomLongBetween(1, 100);
+ long initialCpu = randomLongBetween(1, 100);
+
+ ResourceUsageMetric[] initialTaskResourceMetrics = new ResourceUsageMetric[] {
+ new ResourceUsageMetric(ResourceStats.MEMORY, initialMemory),
+ new ResourceUsageMetric(ResourceStats.CPU, initialCpu) };
+ task.startThreadResourceTracking(i, ResourceStatsType.WORKER_STATS, initialTaskResourceMetrics);
+
+ long memory = initialMemory + randomLongBetween(1, 10000);
+ long cpu = initialCpu + randomLongBetween(1, 10000);
+
+ totalMemory += memory - initialMemory;
+ totalCPU += cpu - initialCpu;
+
+ ResourceUsageMetric[] taskResourceMetrics = new ResourceUsageMetric[] {
+ new ResourceUsageMetric(ResourceStats.MEMORY, memory),
+ new ResourceUsageMetric(ResourceStats.CPU, cpu) };
+ task.updateThreadResourceStats(i, ResourceStatsType.WORKER_STATS, taskResourceMetrics);
+ task.stopThreadResourceTracking(i, ResourceStatsType.WORKER_STATS);
+ }
+ assertEquals(task.getTotalResourceStats().getMemoryInBytes(), totalMemory);
+ assertEquals(task.getTotalResourceStats().getCpuTimeInNanos(), totalCPU);
+
+ // updating should throw an IllegalStateException when no active entry is present
+ assertThrows(
+ IllegalStateException.class,
+ () -> task.updateThreadResourceStats(randomInt(), ResourceStatsType.WORKER_STATS)
+ );
+
+ // re-adding a thread entry that is already present should throw an exception
+ int threadId = randomInt();
+ task.startThreadResourceTracking(threadId, ResourceStatsType.WORKER_STATS, new ResourceUsageMetric(ResourceStats.MEMORY, 100));
+ assertThrows(IllegalStateException.class, () -> task.startThreadResourceTracking(threadId, ResourceStatsType.WORKER_STATS));
+
+ // the existing active entry tracks only memory, so updating with CPU values should throw
+ assertThrows(
+ IllegalStateException.class,
+ () -> task.updateThreadResourceStats(threadId, ResourceStatsType.WORKER_STATS, new ResourceUsageMetric(ResourceStats.CPU, 200))
+ );
+ }
}
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
index 3b7f2ff7f7ae2..6f62883ff436c 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
@@ -232,7 +232,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep
params.put("retry_failed", Boolean.toString(original.isRetryFailed()));
}
if (false == original.masterNodeTimeout().equals(MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT) || randomBoolean()) {
- params.put("master_timeout", original.masterNodeTimeout().toString());
+ params.put("cluster_manager_timeout", original.masterNodeTimeout().toString());
}
if (original.getCommands() != null) {
hasBody = true;
diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java
index 6265bcab82966..fd052308ed87b 100644
--- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java
@@ -596,7 +596,7 @@ public void testRolloverClusterState() throws Exception {
try {
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
- when(env.sharedDataFile()).thenReturn(null);
+ when(env.sharedDataDir()).thenReturn(null);
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
IndicesService indicesService = mockIndicesServices();
@@ -722,7 +722,7 @@ public void testRolloverClusterStateForDataStream() throws Exception {
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
- when(env.sharedDataFile()).thenReturn(null);
+ when(env.sharedDataDir()).thenReturn(null);
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
DocumentMapper documentMapper = mock(DocumentMapper.class);
diff --git a/server/src/test/java/org/opensearch/action/ingest/SimulateProcessorResultTests.java b/server/src/test/java/org/opensearch/action/ingest/SimulateProcessorResultTests.java
index 6fa043761882e..b75c9893eda4b 100644
--- a/server/src/test/java/org/opensearch/action/ingest/SimulateProcessorResultTests.java
+++ b/server/src/test/java/org/opensearch/action/ingest/SimulateProcessorResultTests.java
@@ -32,14 +32,12 @@
package org.opensearch.action.ingest;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.ingest.IngestDocument;
import org.opensearch.test.AbstractXContentTestCase;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.util.StringJoiner;
@@ -85,21 +83,6 @@ public void testSerialization() throws IOException {
}
}
- public void testBWCDescription() throws IOException {
- boolean isSuccessful = randomBoolean();
- boolean isIgnoredException = randomBoolean();
- boolean hasCondition = randomBoolean();
- SimulateProcessorResult simulateProcessorResult = createTestInstance(isSuccessful, isIgnoredException, hasCondition);
-
- BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_7_9_0));
- simulateProcessorResult.writeTo(out);
- StreamInput in = out.bytes().streamInput();
- in.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_7_9_0));
- SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(in);
- assertNull(otherSimulateProcessorResult.getDescription());
- }
-
static SimulateProcessorResult createTestInstance(boolean isSuccessful, boolean isIgnoredException, boolean hasCondition) {
String type = randomAlphaOfLengthBetween(1, 10);
String processorTag = randomAlphaOfLengthBetween(1, 10);
diff --git a/server/src/test/java/org/opensearch/action/main/MainResponseTests.java b/server/src/test/java/org/opensearch/action/main/MainResponseTests.java
index b333118c4e070..6e2dbe4399410 100644
--- a/server/src/test/java/org/opensearch/action/main/MainResponseTests.java
+++ b/server/src/test/java/org/opensearch/action/main/MainResponseTests.java
@@ -58,7 +58,7 @@ protected MainResponse createTestInstance() {
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
String nodeName = randomAlphaOfLength(10);
final String date = new Date(randomNonNegativeLong()).toString();
- Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Build build = new Build(
Build.Type.UNKNOWN,
randomAlphaOfLength(8),
diff --git a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java
index 21ce9c29c6a03..1f037d2d58e11 100644
--- a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java
+++ b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java
@@ -32,7 +32,6 @@
package org.opensearch.action.support;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.support.IndicesOptions.Option;
import org.opensearch.action.support.IndicesOptions.WildcardStates;
@@ -60,14 +59,13 @@
import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.Matchers.is;
public class IndicesOptionsTests extends OpenSearchTestCase {
public void testSerialization() throws Exception {
int iterations = randomIntBetween(5, 20);
for (int i = 0; i < iterations; i++) {
- Version version = randomVersionBetween(random(), LegacyESVersion.V_7_0_0, null);
+ Version version = randomVersionBetween(random(), Version.V_1_0_0, null);
IndicesOptions indicesOptions = IndicesOptions.fromOptions(
randomBoolean(),
randomBoolean(),
@@ -92,15 +90,9 @@ public void testSerialization() throws Exception {
assertThat(indicesOptions2.allowNoIndices(), equalTo(indicesOptions.allowNoIndices()));
assertThat(indicesOptions2.expandWildcardsOpen(), equalTo(indicesOptions.expandWildcardsOpen()));
assertThat(indicesOptions2.expandWildcardsClosed(), equalTo(indicesOptions.expandWildcardsClosed()));
- if (version.before(LegacyESVersion.V_7_7_0)) {
- assertThat(indicesOptions2.expandWildcardsHidden(), is(true));
- } else {
- assertThat(indicesOptions2.expandWildcardsHidden(), equalTo(indicesOptions.expandWildcardsHidden()));
- }
-
+ assertThat(indicesOptions2.expandWildcardsHidden(), equalTo(indicesOptions.expandWildcardsHidden()));
assertThat(indicesOptions2.forbidClosedIndices(), equalTo(indicesOptions.forbidClosedIndices()));
assertThat(indicesOptions2.allowAliasesToMultipleIndices(), equalTo(indicesOptions.allowAliasesToMultipleIndices()));
-
assertEquals(indicesOptions2.ignoreAliases(), indicesOptions.ignoreAliases());
}
}
diff --git a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
index d941c624509da..c59ca1dd60dc7 100644
--- a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
+++ b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
@@ -818,5 +818,7 @@ public void testDiscoveryConfiguredCheck() throws NodeValidationException {
ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey()));
ensureChecksPass.accept(Settings.builder().putList(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey()));
ensureChecksPass.accept(Settings.builder().putList(SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING.getKey()));
+ // Validate the deprecated setting is still valid during the node bootstrap.
+ ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey()));
}
}
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java b/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java
index 079b31f31f599..3e4148cef61cd 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java
@@ -166,10 +166,19 @@ public void testDoesNothingByDefaultIfSeedHostsConfigured() {
testDoesNothingWithSettings(builder().putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()));
}
- public void testDoesNothingByDefaultIfMasterNodesConfigured() {
+ public void testDoesNothingByDefaultIfClusterManagerNodesConfigured() {
testDoesNothingWithSettings(builder().putList(INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey()));
}
+ // Validate the deprecated setting is still valid during the cluster bootstrap.
+ public void testDoesNothingByDefaultIfMasterNodesConfigured() {
+ testDoesNothingWithSettings(builder().putList(INITIAL_MASTER_NODES_SETTING.getKey()));
+ assertWarnings(
+ "[cluster.initial_master_nodes] setting was deprecated in OpenSearch and will be removed in a future release! "
+ + "See the breaking changes documentation for the next major version."
+ );
+ }
+
public void testDoesNothingByDefaultOnMasterIneligibleNodes() {
localNode = new DiscoveryNode(
"local",
@@ -705,7 +714,7 @@ public void testFailBootstrapNonMasterEligibleNodeWithSingleNodeDiscovery() {
IllegalArgumentException.class,
() -> new ClusterBootstrapService(settings.build(), transportService, () -> emptyList(), () -> false, vc -> fail())
).getMessage(),
- containsString("node with [discovery.type] set to [single-node] must be master-eligible")
+ containsString("node with [discovery.type] set to [single-node] must be cluster-manager-eligible")
);
}
}
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
index 13cdc640008cb..391d7b0e56332 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
@@ -191,7 +191,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet: have discovered []; discovery will continue using [] from hosts providers "
+ "cluster-manager not discovered yet: have discovered []; discovery will continue using [] from hosts providers "
+ "and [] from last-known cluster state; node term 15, last-accepted version 12 in term 4"
)
);
@@ -208,7 +208,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet: have discovered []; discovery will continue using ["
+ "cluster-manager not discovered yet: have discovered []; discovery will continue using ["
+ otherAddress
+ "] from hosts providers and [] from last-known cluster state; node term 16, last-accepted version 12 in term 4"
)
@@ -226,7 +226,7 @@ public void testDescriptionOnMasterIneligibleNodes() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet: have discovered ["
+ "cluster-manager not discovered yet: have discovered ["
+ otherNode
+ "]; discovery will continue using [] from hosts providers "
+ "and [] from last-known cluster state; node term 17, last-accepted version 12 in term 4"
@@ -257,7 +257,7 @@ public void testDescriptionForBWCState() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet: have discovered []; discovery will continue using [] from hosts providers "
+ "cluster-manager not discovered yet: have discovered []; discovery will continue using [] from hosts providers "
+ "and [] from last-known cluster state; node term 15, last-accepted version 42 in term 0"
)
);
@@ -328,7 +328,7 @@ public void testDescriptionBeforeBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "cluster-manager not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "[cluster.initial_cluster_manager_nodes] is empty on this node: have discovered []; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -348,7 +348,7 @@ public void testDescriptionBeforeBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "cluster-manager not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "[cluster.initial_cluster_manager_nodes] is empty on this node: have discovered []; "
+ "discovery will continue using ["
+ otherAddress
@@ -370,7 +370,7 @@ public void testDescriptionBeforeBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "cluster-manager not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ "[cluster.initial_cluster_manager_nodes] is empty on this node: have discovered ["
+ otherNode
+ "]; "
@@ -391,8 +391,8 @@ public void testDescriptionBeforeBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet, this node has not previously joined a bootstrapped cluster, and "
- + "this node must discover master-eligible nodes [other] to bootstrap a cluster: have discovered []; "
+ "cluster-manager not discovered yet, this node has not previously joined a bootstrapped cluster, and "
+ + "this node must discover cluster-manager-eligible nodes [other] to bootstrap a cluster: have discovered []; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
+ "] from last-known cluster state; node term 4, last-accepted version 7 in term 4"
@@ -442,7 +442,7 @@ public void testDescriptionAfterDetachCluster() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet and this node was detached from its previous cluster, "
+ "cluster-manager not discovered yet and this node was detached from its previous cluster, "
+ "have discovered []; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -462,7 +462,7 @@ public void testDescriptionAfterDetachCluster() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet and this node was detached from its previous cluster, "
+ "cluster-manager not discovered yet and this node was detached from its previous cluster, "
+ "have discovered []; "
+ "discovery will continue using ["
+ otherAddress
@@ -484,7 +484,7 @@ public void testDescriptionAfterDetachCluster() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet and this node was detached from its previous cluster, "
+ "cluster-manager not discovered yet and this node was detached from its previous cluster, "
+ "have discovered ["
+ otherNode
+ "]; "
@@ -506,7 +506,7 @@ public void testDescriptionAfterDetachCluster() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered yet and this node was detached from its previous cluster, "
+ "cluster-manager not discovered yet and this node was detached from its previous cluster, "
+ "have discovered ["
+ yetAnotherNode
+ "]; "
@@ -534,7 +534,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [otherNode], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [otherNode], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -554,7 +554,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [otherNode], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [otherNode], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using ["
+ otherAddress
@@ -576,7 +576,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [otherNode], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [otherNode], "
+ "have discovered ["
+ otherNode
+ "] which is a quorum; "
@@ -598,7 +598,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [otherNode], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [otherNode], "
+ "have discovered ["
+ yetAnotherNode
+ "] which is not a quorum; "
@@ -619,7 +619,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "cluster-manager not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -638,7 +638,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires at least 2 nodes with ids from [n1, n2, n3], "
+ "cluster-manager not discovered or elected yet, an election requires at least 2 nodes with ids from [n1, n2, n3], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -657,7 +657,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires 2 nodes with ids [n1, n2], "
+ "cluster-manager not discovered or elected yet, an election requires 2 nodes with ids [n1, n2], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -676,7 +676,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4], "
+ "cluster-manager not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -695,7 +695,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4, n5], "
+ "cluster-manager not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4, n5], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -714,7 +714,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4], "
+ "cluster-manager not discovered or elected yet, an election requires at least 3 nodes with ids from [n1, n2, n3, n4], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -733,7 +733,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires 3 nodes with ids [n1, n2, n3], "
+ "cluster-manager not discovered or elected yet, an election requires 3 nodes with ids [n1, n2, n3], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -752,7 +752,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [n1], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [n1], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -771,7 +771,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [n1] and a node with id [n2], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [n1] and a node with id [n2], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -790,7 +790,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [n1] and two nodes with ids [n2, n3], "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [n1] and two nodes with ids [n2, n3], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -809,7 +809,7 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires a node with id [n1] and "
+ "cluster-manager not discovered or elected yet, an election requires a node with id [n1] and "
+ "at least 2 nodes with ids from [n2, n3, n4], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
@@ -859,7 +859,7 @@ public void testDescriptionAfterBootstrapping() {
// nodes from last-known cluster state could be in either order
is(
oneOf(
- "master not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "cluster-manager not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
@@ -867,7 +867,7 @@ public void testDescriptionAfterBootstrapping() {
+ otherMasterNode
+ "] from last-known cluster state; node term 0, last-accepted version 0 in term 0",
- "master not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "cluster-manager not discovered or elected yet, an election requires two nodes with ids [n1, n2], "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ otherMasterNode
@@ -889,8 +889,8 @@ public void testDescriptionAfterBootstrapping() {
new StatusInfo(HEALTHY, "healthy-info")
).getDescription(),
is(
- "master not discovered or elected yet, an election requires one or more nodes that have already participated as "
- + "master-eligible nodes in the cluster but this node was not master-eligible the last time it joined the cluster, "
+ "cluster-manager not discovered or elected yet, an election requires one or more nodes that have already participated as "
+ + "cluster-manager-eligible nodes in the cluster but this node was not cluster-manager-eligible the last time it joined the cluster, "
+ "have discovered [] which is not a quorum; "
+ "discovery will continue using [] from hosts providers and ["
+ localNode
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/opensearch/cluster/coordination/CoordinatorTests.java
index 1cdea588564c4..f43d6ff4e6c02 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/CoordinatorTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/CoordinatorTests.java
@@ -109,7 +109,7 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase {
/**
* This test was added to verify that state recovery is properly reset on a node after it has become master and successfully
* recovered a state (see {@link GatewayService}). The situation which triggers this with a decent likelihood is as follows:
- * 3 master-eligible nodes (leader, follower1, follower2), the followers are shut down (leader remains), when followers come back
+ * 3 cluster-manager-eligible nodes (leader, follower1, follower2), the followers are shut down (leader remains), when followers come back
* one of them becomes leader and publishes first state (with STATE_NOT_RECOVERED_BLOCK) to old leader, which accepts it.
* Old leader is initiating an election at the same time, and wins election. It becomes leader again, but as it previously
* successfully completed state recovery, is never reset to a state where state recovery can be retried.
@@ -1558,7 +1558,9 @@ public void match(LogEvent event) {
final String message = event.getMessage().getFormattedMessage();
assertThat(
message,
- startsWith("master not discovered or elected yet, an election requires at least 2 nodes with ids from [")
+ startsWith(
+ "cluster-manager not discovered or elected yet, an election requires at least 2 nodes with ids from ["
+ )
);
final List<ClusterNode> matchingNodes = cluster.clusterNodes.stream()
@@ -1729,7 +1731,7 @@ public void testDoesNotPerformElectionWhenRestartingFollower() {
if (cluster.clusterNodes.stream().filter(n -> n.getLocalNode().isMasterNode()).count() == 2) {
// in the 2-node case, auto-shrinking the voting configuration is required to reduce the voting configuration down to just
- // the leader, otherwise restarting the other master-eligible node triggers an election
+ // the leader, otherwise restarting the other cluster-manager-eligible node triggers an election
leader.submitSetAutoShrinkVotingConfiguration(true);
cluster.stabilise(2 * DEFAULT_CLUSTER_STATE_UPDATE_DELAY); // 1st delay for the setting update, 2nd for the reconfiguration
}
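Aside: the `submitSetAutoShrinkVotingConfiguration(true)` call above is test plumbing for a dynamic cluster setting. A minimal sketch of flipping the same knob on a live cluster, assuming the standard `cluster.auto_shrink_voting_configuration` setting name (not part of this diff):

    import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
    import org.opensearch.client.Client;
    import org.opensearch.common.settings.Settings;

    final class AutoShrinkVotingConfigExample {
        // Enable auto-shrink so a 2-node cluster reduces its voting configuration
        // to just the leader, avoiding the spurious election described in the
        // comment above. Sketch only, assuming the upstream setting name.
        static void enableAutoShrink(Client client) {
            ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
            request.persistentSettings(Settings.builder().put("cluster.auto_shrink_voting_configuration", true));
            client.admin().cluster().updateSettings(request).actionGet();
        }
    }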
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java
index a019235c99743..49ef48cd1e9c6 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java
@@ -47,20 +47,17 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.VersionUtils;
-import org.opensearch.transport.TransportService;
-import java.util.HashMap;
import java.util.HashSet;
-import java.util.Map;
+import static org.opensearch.test.VersionUtils.allVersions;
import static org.opensearch.test.VersionUtils.maxCompatibleVersion;
import static org.opensearch.test.VersionUtils.randomCompatibleVersion;
-import static org.opensearch.test.VersionUtils.randomVersion;
+import static org.opensearch.test.VersionUtils.randomOpenSearchVersion;
import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -99,7 +96,7 @@ public void testPreventJoinClusterWithUnsupportedIndices() {
public void testPreventJoinClusterWithUnsupportedNodeVersions() {
DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
- final Version version = randomVersion(random());
+ final Version version = randomOpenSearchVersion(random());
builder.add(new DiscoveryNode(UUIDs.base64UUID(), buildNewFakeTransportAddress(), version));
builder.add(new DiscoveryNode(UUIDs.base64UUID(), buildNewFakeTransportAddress(), randomCompatibleVersion(random(), version)));
DiscoveryNodes nodes = builder.build();
@@ -117,14 +114,19 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() {
});
}
- if (minNodeVersion.onOrAfter(LegacyESVersion.V_7_0_0)) {
+ if (minNodeVersion.onOrAfter(LegacyESVersion.V_7_0_0) && minNodeVersion.before(Version.V_3_0_0)) {
Version oldMajor = minNodeVersion.minimumCompatibilityVersion();
expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
}
- final Version minGoodVersion = maxNodeVersion.compareMajor(minNodeVersion) == 0 ?
- // we have to stick with the same major
- minNodeVersion : maxNodeVersion.minimumCompatibilityVersion();
+ final Version minGoodVersion;
+ if (maxNodeVersion.compareMajor(minNodeVersion) == 0) {
+ // we have to stick with the same major
+ minGoodVersion = minNodeVersion;
+ } else {
+ Version minCompatVersion = maxNodeVersion.minimumCompatibilityVersion();
+ minGoodVersion = minCompatVersion.before(allVersions().get(0)) ? allVersions().get(0) : minCompatVersion;
+ }
final Version justGood = randomVersionBetween(random(), minGoodVersion, maxCompatibleVersion(minNodeVersion));
if (randomBoolean()) {
@@ -196,80 +198,4 @@ public void testUpdatesNodeWithNewRoles() throws Exception {
assertThat(result.resultingState.getNodes().get(actualNode.getId()).getRoles(), equalTo(actualNode.getRoles()));
}
-
- public void testUpdatesNodeWithOpenSearchVersionForExistingAndNewNodes() throws Exception {
- // During the upgrade from Elasticsearch, OpenSearch node send their version as 7.10.2 to Elasticsearch master
- // in order to successfully join the cluster. But as soon as OpenSearch node becomes the master, cluster state
- // should show the OpenSearch nodes version as 1.x. As the cluster state was carry forwarded from ES master,
- // version in DiscoveryNode is stale 7.10.2.
- final AllocationService allocationService = mock(AllocationService.class);
- when(allocationService.adaptAutoExpandReplicas(any())).then(invocationOnMock -> invocationOnMock.getArguments()[0]);
- when(allocationService.disassociateDeadNodes(any(), anyBoolean(), any())).then(
- invocationOnMock -> invocationOnMock.getArguments()[0]
- );
- final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null);
- Map<String, Version> channelVersions = new HashMap<>();
- String node_1 = UUIDs.base64UUID(); // OpenSearch node running BWC version
- String node_2 = UUIDs.base64UUID(); // OpenSearch node running BWC version
- String node_3 = UUIDs.base64UUID(); // OpenSearch node running BWC version, sending new join request and no active channel
- String node_4 = UUIDs.base64UUID(); // ES node 7.10.2
- String node_5 = UUIDs.base64UUID(); // ES node 7.10.2 in cluster but missing channel version
- String node_6 = UUIDs.base64UUID(); // ES node 7.9.0
- String node_7 = UUIDs.base64UUID(); // ES node 7.9.0 in cluster but missing channel version
- channelVersions.put(node_1, Version.CURRENT);
- channelVersions.put(node_2, Version.CURRENT);
- channelVersions.put(node_4, LegacyESVersion.V_7_10_2);
- channelVersions.put(node_6, LegacyESVersion.V_7_10_0);
-
- final TransportService transportService = mock(TransportService.class);
- when(transportService.getChannelVersion(any())).thenReturn(channelVersions);
- DiscoveryNodes.Builder nodes = new DiscoveryNodes.Builder().localNodeId(node_1);
- nodes.add(new DiscoveryNode(node_1, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2));
- nodes.add(new DiscoveryNode(node_2, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2));
- nodes.add(new DiscoveryNode(node_3, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2));
- nodes.add(new DiscoveryNode(node_4, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2));
- nodes.add(new DiscoveryNode(node_5, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2));
- nodes.add(new DiscoveryNode(node_6, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_1));
- nodes.add(new DiscoveryNode(node_7, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_0));
- final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build();
- final JoinTaskExecutor joinTaskExecutor = new JoinTaskExecutor(
- Settings.EMPTY,
- allocationService,
- logger,
- rerouteService,
- transportService
- );
- final DiscoveryNode existing_node_3 = clusterState.nodes().get(node_3);
- final DiscoveryNode node_3_new_join = new DiscoveryNode(
- existing_node_3.getName(),
- existing_node_3.getId(),
- existing_node_3.getEphemeralId(),
- existing_node_3.getHostName(),
- existing_node_3.getHostAddress(),
- existing_node_3.getAddress(),
- existing_node_3.getAttributes(),
- existing_node_3.getRoles(),
- Version.CURRENT
- );
-
- final ClusterStateTaskExecutor.ClusterTasksResult<JoinTaskExecutor.Task> result = joinTaskExecutor.execute(
- clusterState,
- List.of(
- new JoinTaskExecutor.Task(node_3_new_join, "test"),
- JoinTaskExecutor.newBecomeMasterTask(),
- JoinTaskExecutor.newFinishElectionTask()
- )
- );
- final ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next();
- assertTrue(taskResult.isSuccess());
- DiscoveryNodes resultNodes = result.resultingState.getNodes();
- assertEquals(Version.CURRENT, resultNodes.get(node_1).getVersion());
- assertEquals(Version.CURRENT, resultNodes.get(node_2).getVersion());
- assertEquals(Version.CURRENT, resultNodes.get(node_3).getVersion()); // 7.10.2 in old state but sent new join and processed
- assertEquals(LegacyESVersion.V_7_10_2, resultNodes.get(node_4).getVersion());
- assertFalse(resultNodes.nodeExists(node_5)); // 7.10.2 node without active channel will be removed and should rejoin
- assertEquals(LegacyESVersion.V_7_10_0, resultNodes.get(node_6).getVersion());
- // 7.9.0 node without active channel but shouldn't get removed
- assertEquals(LegacyESVersion.V_7_10_0, resultNodes.get(node_7).getVersion());
- }
}
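For readers skimming the hunk above: the reworked `minGoodVersion` computation encodes the join floor for mixed-major clusters. A condensed sketch of the same rule, using the test's own helpers:

    // Same logic as the hunk above, extracted for clarity; names mirror the
    // test, not production code. allVersions().get(0) is the oldest version
    // this build still knows about.
    static Version minGoodVersion(Version minNodeVersion, Version maxNodeVersion) {
        if (maxNodeVersion.compareMajor(minNodeVersion) == 0) {
            // same major: any node at or above the oldest cluster node may join
            return minNodeVersion;
        }
        // mixed majors: the floor is the max node's minimum compatibility
        // version, clamped so it never falls below the oldest known version
        Version minCompat = maxNodeVersion.minimumCompatibilityVersion();
        return minCompat.before(allVersions().get(0)) ? allVersions().get(0) : minCompat;
    }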
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/opensearch/cluster/metadata/AutoExpandReplicasTests.java
index d6c62e4bb0903..aafd507aef7cd 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/AutoExpandReplicasTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/AutoExpandReplicasTests.java
@@ -31,7 +31,6 @@
package org.opensearch.cluster.metadata;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.opensearch.action.admin.indices.create.CreateIndexRequest;
@@ -244,7 +243,7 @@ public void testOnlyAutoExpandAllocationFilteringAfterAllNodesUpgraded() {
try {
List<DiscoveryNode> allNodes = new ArrayList<>();
DiscoveryNode oldNode = createNode(
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_1),
+ VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_2_1),
DiscoveryNodeRole.CLUSTER_MANAGER_ROLE,
DiscoveryNodeRole.DATA_ROLE
); // local node is the master
@@ -266,11 +265,11 @@ public void testOnlyAutoExpandAllocationFilteringAfterAllNodesUpgraded() {
state = cluster.reroute(state, new ClusterRerouteRequest());
}
- DiscoveryNode newNode = createNode(
- LegacyESVersion.V_7_6_0,
- DiscoveryNodeRole.CLUSTER_MANAGER_ROLE,
- DiscoveryNodeRole.DATA_ROLE
- ); // local node is the cluster_manager
+ DiscoveryNode newNode = createNode(Version.V_1_3_0, DiscoveryNodeRole.CLUSTER_MANAGER_ROLE, DiscoveryNodeRole.DATA_ROLE); // local node is the cluster_manager
state = cluster.addNodes(state, Collections.singletonList(newNode));
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/DataStreamTemplateTests.java b/server/src/test/java/org/opensearch/cluster/metadata/DataStreamTemplateTests.java
index 23afc23c80279..7f8a32ac84ee5 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/DataStreamTemplateTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/DataStreamTemplateTests.java
@@ -8,19 +8,13 @@
package org.opensearch.cluster.metadata;
-import org.opensearch.Version;
import org.opensearch.cluster.metadata.ComposableIndexTemplate.DataStreamTemplate;
-import org.opensearch.common.io.stream.BytesStreamOutput;
-import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.test.AbstractSerializingTestCase;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
-import static org.hamcrest.Matchers.equalTo;
-
public class DataStreamTemplateTests extends AbstractSerializingTestCase<DataStreamTemplate> {
@Override
@@ -38,20 +32,4 @@ protected DataStreamTemplate createTestInstance() {
return new DataStreamTemplate(new DataStream.TimestampField("timestamp_" + randomAlphaOfLength(5)));
}
- public void testBackwardCompatibleSerialization() throws Exception {
- Version version = VersionUtils.getPreviousVersion(Version.V_1_0_0);
- BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(version);
-
- DataStreamTemplate outTemplate = new DataStreamTemplate();
- outTemplate.writeTo(out);
- assertThat(out.size(), equalTo(0));
-
- StreamInput in = out.bytes().streamInput();
- in.setVersion(version);
- DataStreamTemplate inTemplate = new DataStreamTemplate(in);
-
- assertThat(inTemplate, equalTo(outTemplate));
- }
-
}
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java
index 5bea69c5bbd66..89550b491500d 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java
@@ -36,7 +36,6 @@
import org.hamcrest.Matchers;
import org.junit.Before;
import org.opensearch.ExceptionsHelper;
-import org.opensearch.LegacyESVersion;
import org.opensearch.ResourceAlreadyExistsException;
import org.opensearch.Version;
import org.opensearch.action.admin.indices.alias.Alias;
@@ -204,14 +203,6 @@ public static boolean isSplitable(int source, int target) {
return source * x == target;
}
- public void testNumberOfShards() {
- {
- final Version versionCreated = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
- final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated);
- assertThat(MetadataCreateIndexService.getNumberOfShards(indexSettingsBuilder), equalTo(1));
- }
- }
-
public void testValidateShrinkIndex() {
int numShards = randomIntBetween(2, 42);
ClusterState state = createClusterState(
diff --git a/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java b/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java
index 4c2a500bb5cd3..dace484f80c2b 100644
--- a/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java
+++ b/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java
@@ -216,8 +216,8 @@ public void testCacheFilterReader() throws Exception {
public void testLuceneVersionOnUnknownVersions() {
// between two known versions, should use the lucene version of the previous version
- Version version = LegacyESVersion.fromString("7.10.50");
- assertEquals(VersionUtils.getPreviousVersion(Version.fromString("7.10.3")).luceneVersion, version.luceneVersion);
+ Version version = Version.fromString("1.1.50");
+ assertEquals(VersionUtils.getPreviousVersion(Version.fromString("1.1.3")).luceneVersion, version.luceneVersion);
// too old version, major should be the oldest supported lucene version minus 1
version = LegacyESVersion.fromString("5.2.1");
diff --git a/server/src/test/java/org/opensearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/opensearch/discovery/AbstractDisruptionTestCase.java
index 5a61300caa89e..e690770b3d0a5 100644
--- a/server/src/test/java/org/opensearch/discovery/AbstractDisruptionTestCase.java
+++ b/server/src/test/java/org/opensearch/discovery/AbstractDisruptionTestCase.java
@@ -167,7 +167,7 @@ void assertNoMaster(final String node, @Nullable final ClusterBlock expectedBloc
assertBusy(() -> {
ClusterState state = getNodeClusterState(node);
final DiscoveryNodes nodes = state.nodes();
- assertNull("node [" + node + "] still has [" + nodes.getMasterNode() + "] as master", nodes.getMasterNode());
+ assertNull("node [" + node + "] still has [" + nodes.getMasterNode() + "] as cluster-manager", nodes.getMasterNode());
if (expectedBlocks != null) {
for (ClusterBlockLevel level : expectedBlocks.levels()) {
assertTrue(
diff --git a/server/src/test/java/org/opensearch/env/EnvironmentTests.java b/server/src/test/java/org/opensearch/env/EnvironmentTests.java
index a5d6021b29376..0e343a6e43ba7 100644
--- a/server/src/test/java/org/opensearch/env/EnvironmentTests.java
+++ b/server/src/test/java/org/opensearch/env/EnvironmentTests.java
@@ -117,28 +117,28 @@ public void testPathLogsWhenNotSet() {
final Path pathHome = createTempDir().toAbsolutePath();
final Settings settings = Settings.builder().put("path.home", pathHome).build();
final Environment environment = new Environment(settings, null);
- assertThat(environment.logsFile(), equalTo(pathHome.resolve("logs")));
+ assertThat(environment.logsDir(), equalTo(pathHome.resolve("logs")));
}
public void testDefaultConfigPath() {
final Path path = createTempDir().toAbsolutePath();
final Settings settings = Settings.builder().put("path.home", path).build();
final Environment environment = new Environment(settings, null);
- assertThat(environment.configFile(), equalTo(path.resolve("config")));
+ assertThat(environment.configDir(), equalTo(path.resolve("config")));
}
public void testConfigPath() {
final Path configPath = createTempDir().toAbsolutePath();
final Settings settings = Settings.builder().put("path.home", createTempDir().toAbsolutePath()).build();
final Environment environment = new Environment(settings, configPath);
- assertThat(environment.configFile(), equalTo(configPath));
+ assertThat(environment.configDir(), equalTo(configPath));
}
public void testConfigPathWhenNotSet() {
final Path pathHome = createTempDir().toAbsolutePath();
final Settings settings = Settings.builder().put("path.home", pathHome).build();
final Environment environment = new Environment(settings, null);
- assertThat(environment.configFile(), equalTo(pathHome.resolve("config")));
+ assertThat(environment.configDir(), equalTo(pathHome.resolve("config")));
}
public void testNodeDoesNotRequireLocalStorage() {
@@ -164,7 +164,7 @@ public void testNodeDoesNotRequireLocalStorageButHasPathData() {
public void testNonExistentTempPathValidation() {
Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
Environment environment = new Environment(build, null, true, createTempDir().resolve("this_does_not_exist"));
- FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile);
+ FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpDir);
assertThat(e.getMessage(), startsWith("Temporary file directory ["));
assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible"));
}
@@ -172,7 +172,7 @@ public void testNonExistentTempPathValidation() {
public void testTempPathValidationWhenRegularFile() throws IOException {
Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
Environment environment = new Environment(build, null, true, createTempFile("something", ".test"));
- IOException e = expectThrows(IOException.class, environment::validateTmpFile);
+ IOException e = expectThrows(IOException.class, environment::validateTmpDir);
assertThat(e.getMessage(), startsWith("Configured temporary file directory ["));
assertThat(e.getMessage(), endsWith(".test] is not a directory"));
}
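The hunks above track the rename of `Environment`'s path accessors from `*File` to `*Dir`. A minimal sketch of the renamed surface, limited to the accessors these tests exercise:

    import java.io.IOException;
    import org.opensearch.common.settings.Settings;
    import org.opensearch.env.Environment;

    final class EnvironmentDirExample {
        static void printDefaultPaths() throws IOException {
            // configDir()/logsDir() replace the old configFile()/logsFile() names
            Settings settings = Settings.builder().put("path.home", "/var/lib/opensearch").build();
            Environment env = new Environment(settings, null);
            System.out.println(env.configDir()); // defaults to <path.home>/config
            System.out.println(env.logsDir());   // defaults to <path.home>/logs
            env.validateTmpDir(); // was validateTmpFile(); throws if the temp dir is missing or a regular file
        }
    }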
diff --git a/server/src/test/java/org/opensearch/env/NodeMetadataTests.java b/server/src/test/java/org/opensearch/env/NodeMetadataTests.java
index cd3b9c6205220..92ec33d7e78e0 100644
--- a/server/src/test/java/org/opensearch/env/NodeMetadataTests.java
+++ b/server/src/test/java/org/opensearch/env/NodeMetadataTests.java
@@ -31,23 +31,17 @@
package org.opensearch.env;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.collect.Tuple;
-import org.opensearch.gateway.MetadataStateFormat;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.EqualsHashCodeTestUtils;
import org.opensearch.test.VersionUtils;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
import java.nio.file.Path;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
public class NodeMetadataTests extends OpenSearchTestCase {
@@ -77,22 +71,6 @@ public void testEqualsHashcodeSerialization() {
});
}
- public void testReadsFormatWithoutVersion() throws IOException {
- // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier
- assertTrue(Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(LegacyESVersion.V_7_0_0));
- // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource
- // which do not have the version field
-
- final Path tempDir = createTempDir();
- final Path stateDir = Files.createDirectory(tempDir.resolve(MetadataStateFormat.STATE_DIR_NAME));
- final InputStream resource = this.getClass().getResourceAsStream("testReadsFormatWithoutVersion.binary");
- assertThat(resource, notNullValue());
- Files.copy(resource, stateDir.resolve(NodeMetadata.FORMAT.getStateFileName(between(0, Integer.MAX_VALUE))));
- final NodeMetadata nodeMetadata = NodeMetadata.FORMAT.loadLatestState(logger, xContentRegistry(), tempDir);
- assertThat(nodeMetadata.nodeId(), equalTo("y6VUVMSaStO4Tz-B5BxcOw"));
- assertThat(nodeMetadata.nodeVersion(), equalTo(Version.V_EMPTY));
- }
-
public void testUpgradesLegitimateVersions() {
final String nodeId = randomAlphaOfLength(10);
final NodeMetadata nodeMetadata = new NodeMetadata(
diff --git a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
index d67534bbfbddf..71433673eef5a 100644
--- a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
+++ b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
@@ -32,7 +32,6 @@
package org.opensearch.index;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.AbstractScopedSettings;
@@ -723,7 +722,7 @@ public void testUpdateSoftDeletesFails() {
public void testSoftDeletesDefaultSetting() {
// enabled by default on 7.0+ or later
{
- Version createdVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
+ Version createdVersion = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), createdVersion).build();
assertTrue(IndexSettings.INDEX_SOFT_DELETES_SETTING.get(settings));
}
@@ -731,10 +730,7 @@ public void testSoftDeletesDefaultSetting() {
public void testIgnoreTranslogRetentionSettingsIfSoftDeletesEnabled() {
Settings.Builder settings = Settings.builder()
- .put(
- IndexMetadata.SETTING_VERSION_CREATED,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_4_0, Version.CURRENT)
- );
+ .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT));
if (randomBoolean()) {
settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue());
}
diff --git a/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java
index d8e0a4ea3bc2e..38736b26ea1ba 100644
--- a/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java
+++ b/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java
@@ -88,7 +88,13 @@ public void testThatInstancesAreCachedAndReused() {
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(v), PreBuiltAnalyzers.STANDARD.getAnalyzer(v));
assertNotSame(
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT),
- PreBuiltAnalyzers.STANDARD.getAnalyzer(VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT))
+ PreBuiltAnalyzers.STANDARD.getAnalyzer(
+ VersionUtils.randomVersionBetween(
+ random(),
+ Version.CURRENT.minimumIndexCompatibilityVersion(),
+ VersionUtils.getPreviousVersion(Version.CURRENT)
+ )
+ )
);
// Same Lucene version should be cached:
diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
index 9159dbe92065f..c25ddc12efc7a 100644
--- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
@@ -494,6 +494,212 @@ public void testSegments() throws Exception {
}
}
+ public void testMergeSegmentsOnCommitIsDisabled() throws Exception {
+ final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+
+ final Settings.Builder settings = Settings.builder()
+ .put(defaultSettings.getSettings())
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_MAX_FULL_FLUSH_MERGE_WAIT_TIME.getKey(), TimeValue.timeValueMillis(0))
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_ENABLED.getKey(), true);
+ final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
+ final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
+
+ try (
+ Store store = createStore();
+ InternalEngine engine = createEngine(
+ config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get)
+ )
+ ) {
+ assertThat(engine.segments(false), empty());
+ int numDocsFirstSegment = randomIntBetween(5, 50);
+ Set<String> liveDocsFirstSegment = new HashSet<>();
+ for (int i = 0; i < numDocsFirstSegment; i++) {
+ String id = Integer.toString(i);
+ ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
+ engine.index(indexForDoc(doc));
+ liveDocsFirstSegment.add(id);
+ }
+ engine.refresh("test");
+ List<Segment> segments = engine.segments(randomBoolean());
+ assertThat(segments, hasSize(1));
+ assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
+ assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
+ assertFalse(segments.get(0).committed);
+ int deletes = 0;
+ int updates = 0;
+ int appends = 0;
+ int iterations = scaledRandomIntBetween(1, 50);
+ for (int i = 0; i < iterations && liveDocsFirstSegment.isEmpty() == false; i++) {
+ String idToUpdate = randomFrom(liveDocsFirstSegment);
+ liveDocsFirstSegment.remove(idToUpdate);
+ ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
+ if (randomBoolean()) {
+ engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
+ deletes++;
+ } else {
+ engine.index(indexForDoc(doc));
+ updates++;
+ }
+ if (randomBoolean()) {
+ engine.index(indexForDoc(testParsedDocument(UUIDs.randomBase64UUID(), null, testDocument(), B_1, null)));
+ appends++;
+ }
+ }
+
+ boolean committed = randomBoolean();
+ if (committed) {
+ engine.flush();
+ }
+
+ engine.refresh("test");
+ segments = engine.segments(randomBoolean());
+
+ assertThat(segments, hasSize(2));
+ assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
+ assertThat(segments.get(0).getDeletedDocs(), equalTo(updates + deletes));
+ assertThat(segments.get(0).committed, equalTo(committed));
+
+ assertThat(segments.get(1).getNumDocs(), equalTo(updates + appends));
+ assertThat(segments.get(1).getDeletedDocs(), equalTo(deletes)); // delete tombstones
+ assertThat(segments.get(1).committed, equalTo(committed));
+ }
+ }
+
+ public void testMergeSegmentsOnCommit() throws Exception {
+ final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+
+ final Settings.Builder settings = Settings.builder()
+ .put(defaultSettings.getSettings())
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_MAX_FULL_FLUSH_MERGE_WAIT_TIME.getKey(), TimeValue.timeValueMillis(5000))
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_ENABLED.getKey(), true);
+ final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
+ final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
+
+ try (
+ Store store = createStore();
+ InternalEngine engine = createEngine(
+ config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get)
+ )
+ ) {
+ assertThat(engine.segments(false), empty());
+ int numDocsFirstSegment = randomIntBetween(5, 50);
+ Set<String> liveDocsFirstSegment = new HashSet<>();
+ for (int i = 0; i < numDocsFirstSegment; i++) {
+ String id = Integer.toString(i);
+ ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
+ engine.index(indexForDoc(doc));
+ liveDocsFirstSegment.add(id);
+ }
+ engine.refresh("test");
+ List<Segment> segments = engine.segments(randomBoolean());
+ assertThat(segments, hasSize(1));
+ assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
+ assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
+ assertFalse(segments.get(0).committed);
+ int deletes = 0;
+ int updates = 0;
+ int appends = 0;
+ int iterations = scaledRandomIntBetween(1, 50);
+ for (int i = 0; i < iterations && liveDocsFirstSegment.isEmpty() == false; i++) {
+ String idToUpdate = randomFrom(liveDocsFirstSegment);
+ liveDocsFirstSegment.remove(idToUpdate);
+ ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
+ if (randomBoolean()) {
+ engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
+ deletes++;
+ } else {
+ engine.index(indexForDoc(doc));
+ updates++;
+ }
+ if (randomBoolean()) {
+ engine.index(indexForDoc(testParsedDocument(UUIDs.randomBase64UUID(), null, testDocument(), B_1, null)));
+ appends++;
+ }
+ }
+
+ boolean committed = randomBoolean();
+ if (committed) {
+ engine.flush();
+ }
+
+ engine.refresh("test");
+ segments = engine.segments(randomBoolean());
+
+ // All segments have to be merged into one
+ assertThat(segments, hasSize(1));
+ assertThat(segments.get(0).getNumDocs(), equalTo(numDocsFirstSegment + appends - deletes));
+ assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
+ assertThat(segments.get(0).committed, equalTo(committed));
+ }
+ }
+
+ // this test writes documents to the engine while concurrently flushing/committing
+ public void testConcurrentMergeSegmentsOnCommit() throws Exception {
+ final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+
+ final Settings.Builder settings = Settings.builder()
+ .put(defaultSettings.getSettings())
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_MAX_FULL_FLUSH_MERGE_WAIT_TIME.getKey(), TimeValue.timeValueMillis(5000))
+ .put(IndexSettings.INDEX_MERGE_ON_FLUSH_ENABLED.getKey(), true);
+ final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
+ final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
+
+ try (
+ Store store = createStore();
+ InternalEngine engine = createEngine(
+ config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get)
+ )
+ ) {
+ final int numIndexingThreads = scaledRandomIntBetween(3, 8);
+ final int numDocsPerThread = randomIntBetween(500, 1000);
+ final CyclicBarrier barrier = new CyclicBarrier(numIndexingThreads + 1);
+ final List<Thread> indexingThreads = new ArrayList<>();
+ final CountDownLatch doneLatch = new CountDownLatch(numIndexingThreads);
+ // create N indexing threads to index documents simultaneously
+ for (int threadNum = 0; threadNum < numIndexingThreads; threadNum++) {
+ final int threadIdx = threadNum;
+ Thread indexingThread = new Thread(() -> {
+ try {
+ barrier.await(); // wait for all threads to start at the same time
+ // index random number of docs
+ for (int i = 0; i < numDocsPerThread; i++) {
+ final String id = "thread" + threadIdx + "#" + i;
+ ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
+ engine.index(indexForDoc(doc));
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ } finally {
+ doneLatch.countDown();
+ }
+
+ });
+ indexingThreads.add(indexingThread);
+ }
+
+ // start the indexing threads
+ for (Thread thread : indexingThreads) {
+ thread.start();
+ }
+ barrier.await(); // wait for indexing threads to all be ready to start
+ assertThat(doneLatch.await(10, TimeUnit.SECONDS), is(true));
+
+ boolean committed = randomBoolean();
+ if (committed) {
+ engine.flush();
+ }
+
+ engine.refresh("test");
+ List<Segment> segments = engine.segments(randomBoolean());
+
+ // All segments have to be merged into one
+ assertThat(segments, hasSize(1));
+ assertThat(segments.get(0).getNumDocs(), equalTo(numIndexingThreads * numDocsPerThread));
+ assertThat(segments.get(0).committed, equalTo(committed));
+ }
+ }
+
public void testCommitStats() throws IOException {
final AtomicLong maxSeqNo = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong localCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
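The three new tests above revolve around two merge-on-flush knobs. A minimal sketch of the settings they toggle, using the same `IndexSettings` constants (the literal keys are whatever those constants resolve to):

    import org.opensearch.common.settings.Settings;
    import org.opensearch.common.unit.TimeValue;
    import org.opensearch.index.IndexSettings;

    // Enable merge-on-flush and give the full flush up to 5s to wait for the
    // merge; a wait time of 0 effectively disables the behavior, which is
    // exactly what testMergeSegmentsOnCommitIsDisabled relies on.
    Settings mergeOnFlush = Settings.builder()
        .put(IndexSettings.INDEX_MERGE_ON_FLUSH_ENABLED.getKey(), true)
        .put(IndexSettings.INDEX_MERGE_ON_FLUSH_MAX_FULL_FLUSH_MERGE_WAIT_TIME.getKey(), TimeValue.timeValueSeconds(5))
        .build();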
diff --git a/server/src/test/java/org/opensearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/RootObjectMapperTests.java
index bb92a4d6d49cf..0d0e6324f8959 100644
--- a/server/src/test/java/org/opensearch/index/mapper/RootObjectMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/RootObjectMapperTests.java
@@ -32,12 +32,8 @@
package org.opensearch.index.mapper;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
-import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
-import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.index.mapper.MapperService.MergeReason;
@@ -46,7 +42,6 @@
import java.io.IOException;
import java.util.Arrays;
-import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.Matchers.containsString;
public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
@@ -483,31 +478,4 @@ public void testIllegalDynamicTemplateNoMappingType() throws Exception {
protected boolean forbidPrivateIndexSettings() {
return false;
}
-
- public void testIllegalDynamicTemplatePre7Dot7Index() throws Exception {
- XContentBuilder mapping = XContentFactory.jsonBuilder();
- mapping.startObject();
- {
- mapping.startObject("type");
- mapping.startArray("dynamic_templates");
- {
- mapping.startObject();
- mapping.startObject("my_template");
- mapping.field("match_mapping_type", "string");
- mapping.startObject("mapping");
- mapping.field("type", "string");
- mapping.endObject();
- mapping.endObject();
- mapping.endObject();
- }
- mapping.endArray();
- mapping.endObject();
- }
- mapping.endObject();
- Version createdVersion = randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_6_0);
- Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), createdVersion).build();
- MapperService mapperService = createIndex("test", indexSettings).mapperService();
- DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
- assertThat(mapper.mappingSource().toString(), containsString("\"type\":\"string\""));
- }
}
diff --git a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java
index efec81e803f1c..483e9401bb075 100644
--- a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java
+++ b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java
@@ -42,7 +42,6 @@
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.io.Streams;
@@ -213,34 +212,6 @@ public void testUnderscoreInAnalyzerName() throws IOException {
}
}
- public void testStandardFilterBWC() throws IOException {
- // standard tokenfilter should have been removed entirely in the 7x line. However, a
- // cacheing bug meant that it was still possible to create indexes using a standard
- // filter until 7.6
- {
- Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_6_0, Version.CURRENT);
- final Settings settings = Settings.builder()
- .put("index.analysis.analyzer.my_standard.tokenizer", "standard")
- .put("index.analysis.analyzer.my_standard.filter", "standard")
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put(IndexMetadata.SETTING_VERSION_CREATED, version)
- .build();
- IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> getIndexAnalyzers(settings));
- assertThat(exc.getMessage(), equalTo("The [standard] token filter has been removed."));
- }
- {
- Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2);
- final Settings settings = Settings.builder()
- .put("index.analysis.analyzer.my_standard.tokenizer", "standard")
- .put("index.analysis.analyzer.my_standard.filter", "standard")
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put(IndexMetadata.SETTING_VERSION_CREATED, version)
- .build();
- getIndexAnalyzers(settings);
- assertWarnings("The [standard] token filter is deprecated and will be removed in a future version.");
- }
- }
-
/**
* Tests that plugins can register pre-configured char filters that vary in behavior based on OpenSearch version, Lucene version,
* and that do not vary based on version at all.
@@ -473,7 +444,7 @@ public void testRegisterHunspellDictionary() throws Exception {
InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
Dictionary dictionary;
- try (Directory tmp = new NIOFSDirectory(environment.tmpFile())) {
+ try (Directory tmp = new NIOFSDirectory(environment.tmpDir())) {
dictionary = new Dictionary(tmp, "hunspell", aff, dic);
}
AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() {
diff --git a/server/src/test/java/org/opensearch/ingest/IngestStatsTests.java b/server/src/test/java/org/opensearch/ingest/IngestStatsTests.java
index 0486c9f29f86e..b5c74f0ee5d16 100644
--- a/server/src/test/java/org/opensearch/ingest/IngestStatsTests.java
+++ b/server/src/test/java/org/opensearch/ingest/IngestStatsTests.java
@@ -32,12 +32,10 @@
package org.opensearch.ingest;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.collect.MapBuilder;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collections;
@@ -58,23 +56,6 @@ public void testSerialization() throws IOException {
assertIngestStats(ingestStats, serializedStats, true, true);
}
- public void testBWCIngestProcessorTypeStats() throws IOException {
- IngestStats.Stats totalStats = new IngestStats.Stats(50, 100, 200, 300);
- List<IngestStats.PipelineStat> pipelineStats = createPipelineStats();
- Map<String, List<IngestStats.ProcessorStat>> processorStats = createProcessorStats(pipelineStats);
- IngestStats expectedIngestStats = new IngestStats(totalStats, pipelineStats, processorStats);
-
- // legacy output logic
- BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_7_6_0));
- expectedIngestStats.writeTo(out);
-
- StreamInput in = out.bytes().streamInput();
- in.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_7_6_0));
- IngestStats serializedStats = new IngestStats(in);
- assertIngestStats(expectedIngestStats, serializedStats, true, false);
- }
-
private List<IngestStats.PipelineStat> createPipelineStats() {
IngestStats.PipelineStat pipeline1Stats = new IngestStats.PipelineStat("pipeline1", new IngestStats.Stats(3, 3, 3, 3));
IngestStats.PipelineStat pipeline2Stats = new IngestStats.PipelineStat("pipeline2", new IngestStats.Stats(47, 97, 197, 297));
diff --git a/server/src/test/java/org/opensearch/node/InternalSettingsPreparerTests.java b/server/src/test/java/org/opensearch/node/InternalSettingsPreparerTests.java
index d49bc62583af7..0f1aad5389c23 100644
--- a/server/src/test/java/org/opensearch/node/InternalSettingsPreparerTests.java
+++ b/server/src/test/java/org/opensearch/node/InternalSettingsPreparerTests.java
@@ -85,7 +85,7 @@ public void testEmptySettings() {
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings);
- String configDir = env.configFile().toString();
+ String configDir = env.configDir().toString();
assertTrue(configDir, configDir.startsWith(home));
}
diff --git a/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java b/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java
index 873176f5d42be..96b33153ccf31 100644
--- a/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java
+++ b/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java
@@ -64,6 +64,7 @@
import org.opensearch.persistent.TestPersistentTasksPlugin.TestParams;
import org.opensearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor;
import org.opensearch.test.AbstractDiffableSerializationTestCase;
+import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.util.ArrayList;
@@ -79,7 +80,6 @@
import static org.opensearch.persistent.PersistentTasksExecutor.NO_NODE_FOUND;
import static org.opensearch.test.VersionUtils.allReleasedVersions;
import static org.opensearch.test.VersionUtils.compatibleFutureVersion;
-import static org.opensearch.test.VersionUtils.getPreviousVersion;
import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@@ -281,7 +281,7 @@ public void testMinVersionSerialization() throws IOException {
PersistentTasksCustomMetadata.Builder tasks = PersistentTasksCustomMetadata.builder();
Version minVersion = allReleasedVersions().stream().filter(Version::isRelease).findFirst().orElseThrow(NoSuchElementException::new);
- final Version streamVersion = randomVersionBetween(random(), minVersion, getPreviousVersion(Version.CURRENT));
+ final Version streamVersion = randomVersionBetween(random(), minVersion, VersionUtils.getPreviousVersion(Version.CURRENT));
tasks.addTask(
"test_compatible_version",
TestPersistentTasksExecutor.NAME,
diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java
index e022e78e7424b..d22776cf01f0e 100644
--- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java
+++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java
@@ -102,7 +102,7 @@ static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
settings,
null,
null,
- TestEnvironment.newEnvironment(settings).pluginsFile(),
+ TestEnvironment.newEnvironment(settings).pluginsDir(),
Arrays.asList(classpathPlugins)
);
}
diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
index 94813d1f7cd33..14f9a46169fbb 100644
--- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
+++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
@@ -262,7 +262,7 @@ public void testFsRepositoryCompressDeprecated() {
.put(node().getEnvironment().settings())
.put(FsRepository.REPOSITORIES_COMPRESS_SETTING.getKey(), true)
.build();
- Environment useCompressEnvironment = new Environment(useCompressSettings, node().getEnvironment().configFile());
+ Environment useCompressEnvironment = new Environment(useCompressSettings, node().getEnvironment().configDir());
new FsRepository(metadata, useCompressEnvironment, null, BlobStoreTestUtil.mockClusterService(), null);
diff --git a/server/src/test/java/org/opensearch/rest/RestRequestTests.java b/server/src/test/java/org/opensearch/rest/RestRequestTests.java
index d5a915b42cf87..7abc53e4ca610 100644
--- a/server/src/test/java/org/opensearch/rest/RestRequestTests.java
+++ b/server/src/test/java/org/opensearch/rest/RestRequestTests.java
@@ -34,7 +34,6 @@
import org.opensearch.OpenSearchParseException;
import org.opensearch.common.CheckedConsumer;
-import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.collect.MapBuilder;
@@ -51,13 +50,11 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
-import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
-import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
@@ -283,40 +280,6 @@ public void testRequiredContent() {
assertEquals("unknown content type", e.getMessage());
}
- /*
- * The test is added in 2.0 when the request parameter "cluster_manager_timeout" is introduced.
- * Remove the test along with the removal of the non-inclusive terminology "master_timeout".
- */
- public void testValidateParamValuesAreEqualWhenTheyAreEqual() {
- FakeRestRequest request = new FakeRestRequest();
- String valueForKey1 = randomFrom("value1", "", null);
- String valueForKey2 = "value1";
- request.params().put("key1", valueForKey1);
- request.params().put("key2", valueForKey2);
- request.validateParamValuesAreEqual("key1", "key2");
- assertTrue(
- String.format(
- Locale.ROOT,
- "The 2 values should be equal, or having 1 null/empty value. Value of key1: %s. Value of key2: %s",
- valueForKey1,
- valueForKey2
- ),
- Strings.isNullOrEmpty(valueForKey1) || valueForKey1.equals(valueForKey2)
- );
- }
-
- /*
- * The test is added in 2.0 when the request parameter "cluster_manager_timeout" is introduced.
- * Remove the test along with the removal of the non-inclusive terminology "master_timeout".
- */
- public void testValidateParamValuesAreEqualWhenTheyAreNotEqual() {
- FakeRestRequest request = new FakeRestRequest();
- request.params().put("key1", "value1");
- request.params().put("key2", "value2");
- Exception e = assertThrows(OpenSearchParseException.class, () -> request.validateParamValuesAreEqual("key1", "key2"));
- assertThat(e.getMessage(), containsString("The values of the request parameters: [key1, key2] are required to be equal"));
- }
-
private static RestRequest contentRestRequest(String content, Map<String, String> params) {
Map<String, List<String>> headers = new HashMap<>();
headers.put("Content-Type", Collections.singletonList("application/json"));
diff --git a/server/src/test/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthActionTests.java
index 4f065653b44a6..8334a1e88190a 100644
--- a/server/src/test/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthActionTests.java
+++ b/server/src/test/java/org/opensearch/rest/action/admin/cluster/RestClusterHealthActionTests.java
@@ -52,7 +52,7 @@ public void testFromRequest() {
Map<String, String> params = new HashMap<>();
String index = "index";
boolean local = randomBoolean();
- String masterTimeout = randomTimeValue();
+ String clusterManagerTimeout = randomTimeValue();
String timeout = randomTimeValue();
ClusterHealthStatus waitForStatus = randomFrom(ClusterHealthStatus.values());
boolean waitForNoRelocatingShards = randomBoolean();
@@ -63,7 +63,7 @@ public void testFromRequest() {
params.put("index", index);
params.put("local", String.valueOf(local));
- params.put("master_timeout", masterTimeout);
+ params.put("cluster_manager_timeout", clusterManagerTimeout);
params.put("timeout", timeout);
params.put("wait_for_status", waitForStatus.name());
if (waitForNoRelocatingShards || randomBoolean()) {
@@ -81,7 +81,7 @@ public void testFromRequest() {
assertThat(clusterHealthRequest.indices().length, equalTo(1));
assertThat(clusterHealthRequest.indices()[0], equalTo(index));
assertThat(clusterHealthRequest.local(), equalTo(local));
- assertThat(clusterHealthRequest.masterNodeTimeout(), equalTo(TimeValue.parseTimeValue(masterTimeout, "test")));
+ assertThat(clusterHealthRequest.masterNodeTimeout(), equalTo(TimeValue.parseTimeValue(clusterManagerTimeout, "test")));
assertThat(clusterHealthRequest.timeout(), equalTo(TimeValue.parseTimeValue(timeout, "test")));
assertThat(clusterHealthRequest.waitForStatus(), equalTo(waitForStatus));
assertThat(clusterHealthRequest.waitForNoRelocatingShards(), equalTo(waitForNoRelocatingShards));
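The renamed parameter feeds the same TimeValue parser as before; only the key changes. A quick illustration of the round trip (the "30s" value is hypothetical):

    // Illustration: "cluster_manager_timeout" is parsed exactly like the old
    // "master_timeout" string, via TimeValue.parseTimeValue.
    Map<String, String> params = new HashMap<>();
    params.put("cluster_manager_timeout", "30s");
    TimeValue parsed = TimeValue.parseTimeValue(params.get("cluster_manager_timeout"), "test");
    assert parsed.equals(TimeValue.timeValueSeconds(30));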
diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestNodesActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestNodesActionTests.java
index 9293d40605f42..593ad2907797e 100644
--- a/server/src/test/java/org/opensearch/rest/action/cat/RestNodesActionTests.java
+++ b/server/src/test/java/org/opensearch/rest/action/cat/RestNodesActionTests.java
@@ -32,7 +32,6 @@
package org.opensearch.rest.action.cat;
-import org.opensearch.OpenSearchParseException;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -52,7 +51,6 @@
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
-import static org.hamcrest.CoreMatchers.containsString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -91,20 +89,4 @@ public void testCatNodesWithLocalDeprecationWarning() {
terminate(threadPool);
}
-
- /**
- * Validate both cluster_manager_timeout and its predecessor can be parsed correctly.
- * Remove the test along with MASTER_ROLE. It's added in version 2.0.0.
- */
- public void testCatNodesWithClusterManagerTimeout() {
- TestThreadPool threadPool = new TestThreadPool(RestNodesActionTests.class.getName());
- NodeClient client = new NodeClient(Settings.EMPTY, threadPool);
- FakeRestRequest request = new FakeRestRequest();
- request.params().put("cluster_manager_timeout", randomFrom("1h", "2m"));
- request.params().put("master_timeout", "3s");
- Exception e = assertThrows(OpenSearchParseException.class, () -> action.doCatRequest(request, client));
- assertThat(e.getMessage(), containsString("[master_timeout, cluster_manager_timeout] are required to be equal"));
- assertWarnings(RestNodesAction.MASTER_TIMEOUT_DEPRECATED_MESSAGE);
- terminate(threadPool);
- }
}
diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestIndexActionTests.java
index 5a1d43ff5dd04..85e5497975888 100644
--- a/server/src/test/java/org/opensearch/rest/action/document/RestIndexActionTests.java
+++ b/server/src/test/java/org/opensearch/rest/action/document/RestIndexActionTests.java
@@ -33,7 +33,6 @@
package org.opensearch.rest.action.document;
import org.apache.lucene.util.SetOnce;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.DocWriteRequest;
import org.opensearch.action.index.IndexRequest;
@@ -46,7 +45,6 @@
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.document.RestIndexAction.AutoIdHandler;
import org.opensearch.rest.action.document.RestIndexAction.CreateHandler;
-import org.opensearch.test.VersionUtils;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@@ -96,13 +94,6 @@ public void testAutoIdDefaultsToOptypeCreate() {
checkAutoIdOpType(Version.CURRENT, DocWriteRequest.OpType.CREATE);
}
- public void testAutoIdDefaultsToOptypeIndexForOlderVersions() {
- checkAutoIdOpType(
- VersionUtils.randomVersionBetween(random(), null, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_5_0)),
- DocWriteRequest.OpType.INDEX
- );
- }
-
private void checkAutoIdOpType(Version minClusterVersion, DocWriteRequest.OpType expectedOpType) {
SetOnce<Boolean> executeCalled = new SetOnce<>();
verifyingClient.setExecuteVerifier((actionType, request) -> {
diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java
index e1cf74bdd6aeb..f6ca12f1c514c 100644
--- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java
+++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java
@@ -32,6 +32,8 @@
package org.opensearch.search;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
@@ -76,7 +78,12 @@
import org.opensearch.threadpool.ThreadPool;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.function.Supplier;
@@ -91,6 +98,25 @@
import static org.mockito.Mockito.when;
public class DefaultSearchContextTests extends OpenSearchTestCase {
+ private final ExecutorService executor;
+
+ @ParametersFactory
+ public static Collection<Object[]> concurrency() {
+ return Arrays.asList(new Integer[] { 0 }, new Integer[] { 5 });
+ }
+
+ public DefaultSearchContextTests(int concurrency) {
+ this.executor = (concurrency > 0) ? Executors.newFixedThreadPool(concurrency) : null;
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+
+ if (executor != null) {
+ ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
+ }
+ }
public void testPreProcess() throws Exception {
TimeValue timeout = new TimeValue(randomIntBetween(1, 100));
@@ -183,7 +209,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
contextWithoutScroll.from(300);
contextWithoutScroll.close();
@@ -225,7 +251,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
context1.from(300);
exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
@@ -295,7 +321,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
SliceBuilder sliceBuilder = mock(SliceBuilder.class);
@@ -334,7 +360,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery();
context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false);
@@ -365,7 +391,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(false);
Query query1 = context4.query();
@@ -446,7 +472,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
false,
Version.CURRENT,
false,
- null
+ executor
);
assertThat(context.searcher().hasCancellations(), is(false));
context.searcher().addQueryCancellation(() -> {});
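These tests now run twice under the randomized runner: once with a null executor (sequential search) and once with a five-thread pool, which each DefaultSearchContext receives as its final constructor argument. The lifecycle pattern in isolation (a sketch; the pool size is arbitrary):

    // Sketch of the executor lifecycle used above: create once per test-class
    // instance, pass into the search context, always terminate in tearDown.
    ExecutorService executor = Executors.newFixedThreadPool(5);
    try {
        // ... build DefaultSearchContext instances with `executor` as the last argument ...
    } finally {
        ThreadPool.terminate(executor, 10, TimeUnit.SECONDS); // releases worker threads
    }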
diff --git a/server/src/test/java/org/opensearch/search/SearchCancellationTests.java b/server/src/test/java/org/opensearch/search/SearchCancellationTests.java
index 1927558f94094..f479f3a1b99f1 100644
--- a/server/src/test/java/org/opensearch/search/SearchCancellationTests.java
+++ b/server/src/test/java/org/opensearch/search/SearchCancellationTests.java
@@ -108,7 +108,8 @@ public void testAddingCancellationActions() throws IOException {
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
);
NullPointerException npe = expectThrows(NullPointerException.class, () -> searcher.addQueryCancellation(null));
assertEquals("cancellation runnable should not be null", npe.getMessage());
@@ -127,7 +128,8 @@ public void testCancellableCollector() throws IOException {
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
);
searcher.search(new MatchAllDocsQuery(), collector1);
@@ -154,7 +156,8 @@ public void testExitableDirectoryReader() throws IOException {
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
);
searcher.addQueryCancellation(cancellation);
CompiledAutomaton automaton = new CompiledAutomaton(new RegExp("a.*").toAutomaton());
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java
index b85761b709105..5e230a445ec98 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java
@@ -32,23 +32,10 @@
package org.opensearch.search.aggregations.bucket;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
-import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoBoundingBoxTests;
-import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.io.stream.BytesStreamOutput;
-import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput;
-import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
import org.opensearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.opensearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
-import org.opensearch.test.VersionUtils;
-
-import java.util.Collections;
-
-import static org.hamcrest.Matchers.equalTo;
public class GeoHashGridTests extends BaseAggregationTestCase<GeoHashGridAggregationBuilder> {
@@ -72,26 +59,4 @@ protected GeoHashGridAggregationBuilder createTestAggregatorBuilder() {
}
return factory;
}
-
- public void testSerializationPreBounds() throws Exception {
- Version noBoundsSupportVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_0);
- GeoHashGridAggregationBuilder builder = createTestAggregatorBuilder();
- try (BytesStreamOutput output = new BytesStreamOutput()) {
- output.setVersion(LegacyESVersion.V_7_6_0);
- builder.writeTo(output);
- try (
- StreamInput in = new NamedWriteableAwareStreamInput(
- output.bytes().streamInput(),
- new NamedWriteableRegistry(Collections.emptyList())
- )
- ) {
- in.setVersion(noBoundsSupportVersion);
- GeoHashGridAggregationBuilder readBuilder = new GeoHashGridAggregationBuilder(in);
- assertThat(
- readBuilder.geoBoundingBox(),
- equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)))
- );
- }
- }
- }
}
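GeoHashGridTests here and GeoTileGridTests below lose the same wire-BWC round trip: the deleted tests pinned the output stream to a legacy 7.x version and read the builder back at a pre-bounds version, which is meaningless once the LegacyESVersion 7.x constants are removed. For reference, the general shape of such a round trip at the current version (a sketch only; the deleted tests pinned legacy versions instead):

    // Sketch: serialize an aggregation builder and read it back at Version.CURRENT.
    try (BytesStreamOutput output = new BytesStreamOutput()) {
        output.setVersion(Version.CURRENT);
        builder.writeTo(output);
        try (
            StreamInput in = new NamedWriteableAwareStreamInput(
                output.bytes().streamInput(),
                new NamedWriteableRegistry(Collections.emptyList())
            )
        ) {
            in.setVersion(Version.CURRENT);
            GeoHashGridAggregationBuilder readBuilder = new GeoHashGridAggregationBuilder(in);
            assertEquals(builder, readBuilder); // the round trip preserves the builder
        }
    }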
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java
index 3564ea337f741..d54667fb4f1a6 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java
@@ -32,24 +32,11 @@
package org.opensearch.search.aggregations.bucket;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
-import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoBoundingBoxTests;
-import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.io.stream.BytesStreamOutput;
-import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput;
-import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
import org.opensearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
-import org.opensearch.test.VersionUtils;
-
-import java.util.Collections;
-
-import static org.hamcrest.Matchers.equalTo;
public class GeoTileGridTests extends BaseAggregationTestCase<GeoTileGridAggregationBuilder> {
@@ -72,26 +59,4 @@ protected GeoTileGridAggregationBuilder createTestAggregatorBuilder() {
}
return factory;
}
-
- public void testSerializationPreBounds() throws Exception {
- Version noBoundsSupportVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_0);
- GeoTileGridAggregationBuilder builder = createTestAggregatorBuilder();
- try (BytesStreamOutput output = new BytesStreamOutput()) {
- output.setVersion(LegacyESVersion.V_7_6_0);
- builder.writeTo(output);
- try (
- StreamInput in = new NamedWriteableAwareStreamInput(
- output.bytes().streamInput(),
- new NamedWriteableRegistry(Collections.emptyList())
- )
- ) {
- in.setVersion(noBoundsSupportVersion);
- GeoTileGridAggregationBuilder readBuilder = new GeoTileGridAggregationBuilder(in);
- assertThat(
- readBuilder.geoBoundingBox(),
- equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)))
- );
- }
- }
- }
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java
index 799faecb5ab57..6b8655eccd74d 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java
@@ -33,7 +33,6 @@
package org.opensearch.search.aggregations.bucket;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.automaton.RegExp;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
import org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder;
@@ -59,13 +58,13 @@ protected RareTermsAggregationBuilder createTestAggregatorBuilder() {
IncludeExclude incExc = null;
switch (randomInt(6)) {
case 0:
- incExc = new IncludeExclude(new RegExp("foobar"), null);
+ incExc = new IncludeExclude("foobar", null);
break;
case 1:
- incExc = new IncludeExclude(null, new RegExp("foobaz"));
+ incExc = new IncludeExclude(null, "foobaz");
break;
case 2:
- incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz"));
+ incExc = new IncludeExclude("foobar", "foobaz");
break;
case 3:
SortedSet<BytesRef> includeValues = new TreeSet<>();
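Here and in the two following test files, IncludeExclude is constructed from the regex source strings instead of precompiled org.apache.lucene.util.automaton.RegExp objects, deferring compilation to IncludeExclude itself (which is what lets it see index-scoped settings; see the IndexSettings threading in IncludeExcludeTests below). Usage sketch:

    // Sketch: the String-based constructor takes raw regex sources (include, exclude);
    // pattern compilation is deferred to IncludeExclude (inferred from this API change).
    IncludeExclude incExc = new IncludeExclude("foo.*", "foobaz");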
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java
index 3001f8ede7f4d..6312d6c175866 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java
@@ -33,7 +33,6 @@
package org.opensearch.search.aggregations.bucket;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.automaton.RegExp;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
@@ -160,13 +159,13 @@ static IncludeExclude getIncludeExclude() {
IncludeExclude incExc = null;
switch (randomInt(5)) {
case 0:
- incExc = new IncludeExclude(new RegExp("foobar"), null);
+ incExc = new IncludeExclude("foobar", null);
break;
case 1:
- incExc = new IncludeExclude(null, new RegExp("foobaz"));
+ incExc = new IncludeExclude(null, "foobaz");
break;
case 2:
- incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz"));
+ incExc = new IncludeExclude("foobar", "foobaz");
break;
case 3:
SortedSet<BytesRef> includeValues = new TreeSet<>();
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java
index eb4f33c6f8e19..04e7fad2105ec 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java
@@ -33,7 +33,6 @@
package org.opensearch.search.aggregations.bucket;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.automaton.RegExp;
import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
import org.opensearch.search.aggregations.BucketOrder;
@@ -118,13 +117,13 @@ protected TermsAggregationBuilder createTestAggregatorBuilder() {
IncludeExclude incExc = null;
switch (randomInt(6)) {
case 0:
- incExc = new IncludeExclude(new RegExp("foobar"), null);
+ incExc = new IncludeExclude("foobar", null);
break;
case 1:
- incExc = new IncludeExclude(null, new RegExp("foobaz"));
+ incExc = new IncludeExclude(null, "foobaz");
break;
case 2:
- incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz"));
+ incExc = new IncludeExclude("foobar", "foobaz");
break;
case 3:
SortedSet<BytesRef> includeValues = new TreeSet<>();
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilderTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilderTests.java
index 58199741b1ee2..2b1700676f549 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilderTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilderTests.java
@@ -32,22 +32,7 @@
package org.opensearch.search.aggregations.bucket.composite;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
-import org.opensearch.common.geo.GeoBoundingBox;
-import org.opensearch.common.geo.GeoBoundingBoxTests;
-import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.io.stream.BytesStreamOutput;
-import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput;
-import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.VersionUtils;
-
-import java.io.IOException;
-import java.util.Collections;
-
-import static org.hamcrest.Matchers.equalTo;
public class GeoTileGridValuesSourceBuilderTests extends OpenSearchTestCase {
@@ -56,28 +41,4 @@ public void testSetFormat() {
expectThrows(IllegalArgumentException.class, () -> builder.format("format"));
}
- public void testBWCBounds() throws IOException {
- Version noBoundsSupportVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_0);
- GeoTileGridValuesSourceBuilder builder = new GeoTileGridValuesSourceBuilder("name");
- if (randomBoolean()) {
- builder.geoBoundingBox(GeoBoundingBoxTests.randomBBox());
- }
- try (BytesStreamOutput output = new BytesStreamOutput()) {
- output.setVersion(LegacyESVersion.V_7_6_0);
- builder.writeTo(output);
- try (
- StreamInput in = new NamedWriteableAwareStreamInput(
- output.bytes().streamInput(),
- new NamedWriteableRegistry(Collections.emptyList())
- )
- ) {
- in.setVersion(noBoundsSupportVersion);
- GeoTileGridValuesSourceBuilder readBuilder = new GeoTileGridValuesSourceBuilder(in);
- assertThat(
- readBuilder.geoBoundingBox(),
- equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)))
- );
- }
- }
- }
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
index 7703afa88d93c..34cc29d40a9fd 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
@@ -41,7 +41,6 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.automaton.RegExp;
import org.opensearch.common.Numbers;
import org.opensearch.index.mapper.BinaryFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
@@ -97,7 +96,7 @@ public void testMatchAllDocs() throws IOException {
}
public void testBadIncludeExclude() throws IOException {
- IncludeExclude includeExclude = new IncludeExclude(new RegExp("foo"), null);
+ IncludeExclude includeExclude = new IncludeExclude("foo", null);
// Make sure the include/exclude fails regardless of how the user tries to type hint the agg
AggregationExecutionException e = expectThrows(
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
index 13e41d5a2e543..846f71b12dab0 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
@@ -42,7 +42,6 @@
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.automaton.RegExp;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.NumberFieldMapper;
import org.opensearch.search.aggregations.AggregationExecutionException;
@@ -116,7 +115,7 @@ public void testMatchAllDocs() throws IOException {
}
public void testBadIncludeExclude() throws IOException {
- IncludeExclude includeExclude = new IncludeExclude(new RegExp("foo"), null);
+ IncludeExclude includeExclude = new IncludeExclude("foo", null);
// Numerics don't support any regex include/exclude, so should fail no matter what we do
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetricTests.java
index 69c53d1a526e8..fdb59591cba36 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetricTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetricTests.java
@@ -32,7 +32,6 @@
package org.opensearch.search.aggregations.metrics;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.settings.Settings;
import org.opensearch.script.MockScriptEngine;
@@ -42,12 +41,10 @@
import org.opensearch.script.ScriptService;
import org.opensearch.script.ScriptType;
import org.opensearch.search.aggregations.Aggregation.CommonFields;
-import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.InternalAggregation.ReduceContext;
import org.opensearch.search.aggregations.ParsedAggregation;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.opensearch.test.InternalAggregationTestCase;
-import org.opensearch.test.VersionUtils;
import java.io.IOException;
import java.util.ArrayList;
@@ -60,7 +57,6 @@
import java.util.function.Supplier;
import static java.util.Collections.singletonList;
-import static org.hamcrest.Matchers.equalTo;
public class InternalScriptedMetricTests extends InternalAggregationTestCase<InternalScriptedMetric> {
@@ -276,45 +272,4 @@ protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance)
}
return new InternalScriptedMetric(name, aggregationsList, reduceScript, metadata);
}
-
- public void testOldSerialization() throws IOException {
- // A single element list looks like a fully reduced agg
- InternalScriptedMetric original = new InternalScriptedMetric(
- "test",
- org.opensearch.common.collect.List.of("foo"),
- new Script("test"),
- null
- );
- original.mergePipelineTreeForBWCSerialization(PipelineTree.EMPTY);
- InternalScriptedMetric roundTripped = (InternalScriptedMetric) copyNamedWriteable(
- original,
- getNamedWriteableRegistry(),
- InternalAggregation.class,
- VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_8_0))
- );
- assertThat(roundTripped, equalTo(original));
-
- // A multi-element list looks like a non-reduced agg
- InternalScriptedMetric unreduced = new InternalScriptedMetric(
- "test",
- org.opensearch.common.collect.List.of("foo", "bar"),
- new Script("test"),
- null
- );
- unreduced.mergePipelineTreeForBWCSerialization(PipelineTree.EMPTY);
- Exception e = expectThrows(
- IllegalArgumentException.class,
- () -> copyNamedWriteable(
- unreduced,
- getNamedWriteableRegistry(),
- InternalAggregation.class,
- VersionUtils.randomVersionBetween(
- random(),
- LegacyESVersion.V_7_0_0,
- VersionUtils.getPreviousVersion(LegacyESVersion.V_7_8_0)
- )
- )
- );
- assertThat(e.getMessage(), equalTo("scripted_metric doesn't support cross cluster search until 7.8.0"));
- }
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java
index d84812557ab18..d104fc6783dc5 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java
@@ -36,12 +36,16 @@
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
+import org.opensearch.Version;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.ParseField;
+import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.fielddata.AbstractSortedSetDocValues;
import org.opensearch.search.DocValueFormat;
import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
@@ -53,14 +57,24 @@
import java.util.TreeSet;
public class IncludeExcludeTests extends OpenSearchTestCase {
+
+ private final IndexSettings dummyIndexSettings = new IndexSettings(
+ IndexMetadata.builder("index")
+ .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
+ .numberOfShards(1)
+ .numberOfReplicas(0)
+ .build(),
+ Settings.EMPTY
+ );
+
public void testEmptyTermsWithOrds() throws IOException {
IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null);
- OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet());
assertEquals(0, acceptedOrds.length());
inexcl = new IncludeExclude(null, new TreeSet<>(Collections.singleton(new BytesRef("foo"))));
- filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet());
assertEquals(0, acceptedOrds.length());
}
@@ -99,13 +113,13 @@ public long getValueCount() {
};
IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null);
- OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(ords);
assertEquals(1, acceptedOrds.length());
assertTrue(acceptedOrds.get(0));
inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("bar"))), null);
- filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
acceptedOrds = filter.acceptedGlobalOrdinals(ords);
assertEquals(1, acceptedOrds.length());
assertFalse(acceptedOrds.get(0));
@@ -114,7 +128,7 @@ public long getValueCount() {
new TreeSet<>(Collections.singleton(new BytesRef("foo"))),
new TreeSet<>(Collections.singleton(new BytesRef("foo")))
);
- filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
acceptedOrds = filter.acceptedGlobalOrdinals(ords);
assertEquals(1, acceptedOrds.length());
assertFalse(acceptedOrds.get(0));
@@ -123,7 +137,7 @@ public long getValueCount() {
null, // means everything included
new TreeSet<>(Collections.singleton(new BytesRef("foo")))
);
- filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
+ filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
acceptedOrds = filter.acceptedGlobalOrdinals(ords);
assertEquals(1, acceptedOrds.length());
assertFalse(acceptedOrds.get(0));
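Every convertToOrdinalsFilter call now threads IndexSettings through, which presumably is what allows the deferred regex compilation (see the String-based IncludeExclude constructors above) to consult index-level limits; the concrete setting consulted is not shown in this diff. Combined usage, reusing the dummyIndexSettings defined above:

    // Sketch: a regex-based IncludeExclude plus the new IndexSettings argument.
    IncludeExclude inexcl = new IncludeExclude("f.*", null);
    OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);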
diff --git a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
index de0a31b9dc04b..eb7dde4b0b2ce 100644
--- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
+++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
@@ -258,7 +258,8 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
);
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java
index afaab15e1431e..7f4dcdaed2aa1 100644
--- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java
+++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java
@@ -32,8 +32,6 @@
package org.opensearch.search.profile.query;
-import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
@@ -64,18 +62,12 @@
import org.opensearch.search.internal.ContextIndexSearcher;
import org.opensearch.search.profile.ProfileResult;
import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
@@ -85,16 +77,6 @@ public class QueryProfilerTests extends OpenSearchTestCase {
private Directory dir;
private IndexReader reader;
private ContextIndexSearcher searcher;
- private ExecutorService executor;
-
- @ParametersFactory
- public static Collection<Object[]> concurrency() {
- return Arrays.asList(new Integer[] { 0 }, new Integer[] { 5 });
- }
-
- public QueryProfilerTests(int concurrency) {
- this.executor = (concurrency > 0) ? Executors.newFixedThreadPool(concurrency) : null;
- }
@Before
public void setUp() throws Exception {
@@ -120,7 +102,7 @@ public void setUp() throws Exception {
IndexSearcher.getDefaultQueryCache(),
ALWAYS_CACHE_POLICY,
true,
- executor
+ null
);
}
@@ -134,10 +116,6 @@ public void tearDown() throws Exception {
assertThat(cache.getTotalCount(), equalTo(cache.getMissCount()));
assertThat(cache.getCacheSize(), equalTo(0L));
- if (executor != null) {
- ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
- }
-
IOUtils.close(reader, dir);
dir = null;
reader = null;
@@ -145,7 +123,7 @@ public void tearDown() throws Exception {
}
public void testBasic() throws IOException {
- QueryProfiler profiler = new QueryProfiler(searcher.allowConcurrentSegmentSearch());
+ QueryProfiler profiler = new QueryProfiler(false);
searcher.setProfiler(profiler);
Query query = new TermQuery(new Term("foo", "bar"));
searcher.search(query, 1);
@@ -171,7 +149,7 @@ public void testBasic() throws IOException {
}
public void testNoScoring() throws IOException {
- QueryProfiler profiler = new QueryProfiler(searcher.allowConcurrentSegmentSearch());
+ QueryProfiler profiler = new QueryProfiler(false);
searcher.setProfiler(profiler);
Query query = new TermQuery(new Term("foo", "bar"));
searcher.search(query, 1, Sort.INDEXORDER); // scores are not needed
@@ -197,7 +175,7 @@ public void testNoScoring() throws IOException {
}
public void testUseIndexStats() throws IOException {
- QueryProfiler profiler = new QueryProfiler(searcher.allowConcurrentSegmentSearch());
+ QueryProfiler profiler = new QueryProfiler(false);
searcher.setProfiler(profiler);
Query query = new TermQuery(new Term("foo", "bar"));
searcher.count(query); // will use index stats
@@ -211,7 +189,7 @@ public void testUseIndexStats() throws IOException {
}
public void testApproximations() throws IOException {
- QueryProfiler profiler = new QueryProfiler(searcher.allowConcurrentSegmentSearch());
+ QueryProfiler profiler = new QueryProfiler(false);
searcher.setProfiler(profiler);
Query query = new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random());
searcher.count(query);
diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
index b87c11dce5be2..1232347edea64 100644
--- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
+++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
@@ -39,6 +39,7 @@
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
@@ -77,6 +78,7 @@
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.store.Directory;
@@ -88,12 +90,15 @@
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.NumberFieldMapper;
+import org.opensearch.index.mapper.NumberFieldMapper.NumberFieldType;
+import org.opensearch.index.mapper.NumberFieldMapper.NumberType;
import org.opensearch.index.query.ParsedQuery;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.index.search.OpenSearchToParentBlockJoinQuery;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardTestCase;
import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.collapse.CollapseBuilder;
import org.opensearch.search.internal.ContextIndexSearcher;
import org.opensearch.search.internal.ScrollContext;
import org.opensearch.search.internal.SearchContext;
@@ -144,7 +149,7 @@ private void countTestCase(Query query, IndexReader reader, boolean shouldCollec
context.parsedQuery(new ParsedQuery(query));
context.setSize(0);
context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
- final boolean rescore = QueryPhase.executeInternal(context);
+ final boolean rescore = QueryPhase.executeInternal(context.withCleanQueryResult());
assertFalse(rescore);
ContextIndexSearcher countSearcher = shouldCollectCount
@@ -157,7 +162,7 @@ private void countTestCase(boolean withDeletions) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
if (randomBoolean()) {
@@ -228,12 +233,12 @@ public void testPostFilterDisablesCountOptimization() throws Exception {
context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
context.setSearcher(newContextSearcher(reader));
context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery()));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value);
reader.close();
dir.close();
@@ -261,7 +266,7 @@ public void testTerminateAfterWithFilter() throws Exception {
context.setSize(10);
for (int i = 0; i < 10; i++) {
context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", Integer.toString(i)))));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
}
@@ -283,12 +288,13 @@ public void testMinScoreDisablesCountOptimization() throws Exception {
context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
context.setSize(0);
context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
context.minimumScore(100);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation);
reader.close();
dir.close();
}
@@ -297,7 +303,7 @@ public void testQueryCapturesThreadPoolStats() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
w.addDocument(new Document());
}
@@ -307,7 +313,7 @@ public void testQueryCapturesThreadPoolStats() throws Exception {
context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
QuerySearchResult results = context.queryResult();
assertThat(results.serviceTimeEWMA(), greaterThanOrEqualTo(0L));
assertThat(results.nodeQueueSize(), greaterThanOrEqualTo(0));
@@ -320,7 +326,7 @@ public void testInOrderScrollOptimization() throws Exception {
final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
w.addDocument(new Document());
}
@@ -336,14 +342,14 @@ public void testInOrderScrollOptimization() throws Exception {
int size = randomIntBetween(2, 5);
context.setSize(size);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertNull(context.queryResult().terminatedEarly());
assertThat(context.terminateAfter(), equalTo(0));
assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
context.setSearcher(newEarlyTerminationContextSearcher(reader, size));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertThat(context.terminateAfter(), equalTo(size));
assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
@@ -356,7 +362,7 @@ public void testTerminateAfterEarlyTermination() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
if (randomBoolean()) {
@@ -377,25 +383,25 @@ public void testTerminateAfterEarlyTermination() throws Exception {
context.terminateAfter(numDocs);
{
context.setSize(10);
- TotalHitCountCollector collector = new TotalHitCountCollector();
- context.queryCollectors().put(TotalHitCountCollector.class, collector);
- QueryPhase.executeInternal(context);
+ final TestTotalHitCountCollectorManager manager = TestTotalHitCountCollectorManager.create();
+ context.queryCollectorManagers().put(TotalHitCountCollector.class, manager);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertFalse(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10));
- assertThat(collector.getTotalHits(), equalTo(numDocs));
+ assertThat(manager.getTotalHits(), equalTo(numDocs));
}
context.terminateAfter(1);
{
context.setSize(1);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
context.setSize(0);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
@@ -403,7 +409,7 @@ public void testTerminateAfterEarlyTermination() throws Exception {
{
context.setSize(1);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
@@ -414,38 +420,38 @@ public void testTerminateAfterEarlyTermination() throws Exception {
.add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD)
.build();
context.parsedQuery(new ParsedQuery(bq));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
context.setSize(0);
context.parsedQuery(new ParsedQuery(bq));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
}
{
context.setSize(1);
- TotalHitCountCollector collector = new TotalHitCountCollector();
- context.queryCollectors().put(TotalHitCountCollector.class, collector);
- QueryPhase.executeInternal(context);
+ final TestTotalHitCountCollectorManager manager = TestTotalHitCountCollectorManager.create();
+ context.queryCollectorManagers().put(TotalHitCountCollector.class, manager);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
- assertThat(collector.getTotalHits(), equalTo(1));
- context.queryCollectors().clear();
+ assertThat(manager.getTotalHits(), equalTo(1));
+ context.queryCollectorManagers().clear();
}
{
context.setSize(0);
- TotalHitCountCollector collector = new TotalHitCountCollector();
- context.queryCollectors().put(TotalHitCountCollector.class, collector);
- QueryPhase.executeInternal(context);
+ final TestTotalHitCountCollectorManager manager = TestTotalHitCountCollectorManager.create();
+ context.queryCollectorManagers().put(TotalHitCountCollector.class, manager);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
- assertThat(collector.getTotalHits(), equalTo(1));
+ assertThat(manager.getTotalHits(), equalTo(1));
}
// tests with trackTotalHits and terminateAfter
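Two changes repeat through this file: context.withCleanQueryResult() resets the per-request QuerySearchResult so repeated executeInternal calls inside one test do not observe stale state (an inference from its usage here), and raw TotalHitCountCollector instances give way to TestTotalHitCountCollectorManager, a test helper presumably built on Lucene's CollectorManager contract of per-slice collector creation plus a final reduce. A minimal sketch of that Lucene contract (an illustration, not the helper's actual implementation):

    // Sketch of Lucene's CollectorManager contract: newCollector() per search slice,
    // reduce() to merge the per-slice results into one total.
    CollectorManager<TotalHitCountCollector, Integer> manager = new CollectorManager<>() {
        @Override
        public TotalHitCountCollector newCollector() {
            return new TotalHitCountCollector();
        }

        @Override
        public Integer reduce(Collection<TotalHitCountCollector> collectors) {
            return collectors.stream().mapToInt(TotalHitCountCollector::getTotalHits).sum();
        }
    };
    int totalHits = searcher.search(new MatchAllDocsQuery(), manager);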
@@ -453,9 +459,9 @@ public void testTerminateAfterEarlyTermination() throws Exception {
context.setSize(0);
for (int trackTotalHits : new int[] { -1, 3, 76, 100 }) {
context.trackTotalHitsUpTo(trackTotalHits);
- TotalHitCountCollector collector = new TotalHitCountCollector();
- context.queryCollectors().put(TotalHitCountCollector.class, collector);
- QueryPhase.executeInternal(context);
+ final TestTotalHitCountCollectorManager manager = TestTotalHitCountCollectorManager.create();
+ context.queryCollectorManagers().put(TotalHitCountCollector.class, manager);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
if (trackTotalHits == -1) {
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L));
@@ -463,16 +469,14 @@ public void testTerminateAfterEarlyTermination() throws Exception {
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) Math.min(trackTotalHits, 10)));
}
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
- assertThat(collector.getTotalHits(), equalTo(10));
+ assertThat(manager.getTotalHits(), equalTo(10));
}
context.terminateAfter(7);
context.setSize(10);
for (int trackTotalHits : new int[] { -1, 3, 75, 100 }) {
context.trackTotalHitsUpTo(trackTotalHits);
- EarlyTerminatingCollector collector = new EarlyTerminatingCollector(new TotalHitCountCollector(), 1, false);
- context.queryCollectors().put(EarlyTerminatingCollector.class, collector);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertTrue(context.queryResult().terminatedEarly());
if (trackTotalHits == -1) {
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L));
@@ -490,7 +494,7 @@ public void testIndexSortingEarlyTermination() throws Exception {
final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
if (randomBoolean()) {
@@ -511,7 +515,7 @@ public void testIndexSortingEarlyTermination() throws Exception {
context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
@@ -520,7 +524,7 @@ public void testIndexSortingEarlyTermination() throws Exception {
{
context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1)));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertNull(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
@@ -528,28 +532,28 @@ public void testIndexSortingEarlyTermination() throws Exception {
assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
context.parsedPostFilter(null);
- final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
- context.queryCollectors().put(TotalHitCountCollector.class, totalHitCountCollector);
- QueryPhase.executeInternal(context);
+ final TestTotalHitCountCollectorManager manager = TestTotalHitCountCollectorManager.create(sort);
+ context.queryCollectorManagers().put(TotalHitCountCollector.class, manager);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertNull(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
- assertThat(totalHitCountCollector.getTotalHits(), equalTo(numDocs));
- context.queryCollectors().clear();
+ assertThat(manager.getTotalHits(), equalTo(numDocs));
+ context.queryCollectorManagers().clear();
}
{
context.setSearcher(newEarlyTerminationContextSearcher(reader, 1));
context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertNull(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertNull(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
@@ -564,7 +568,7 @@ public void testIndexSortScrollOptimization() throws Exception {
final Sort indexSort = new Sort(new SortField("rank", SortField.Type.INT), new SortField("tiebreaker", SortField.Type.INT));
IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(indexSort);
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
- final int numDocs = scaledRandomIntBetween(100, 200);
+ final int numDocs = scaledRandomIntBetween(600, 900);
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
doc.add(new NumericDocValuesField("rank", random().nextInt()));
@@ -592,7 +596,7 @@ public void testIndexSortScrollOptimization() throws Exception {
context.setSize(10);
context.sort(searchSortAndFormat);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertNull(context.queryResult().terminatedEarly());
assertThat(context.terminateAfter(), equalTo(0));
@@ -601,7 +605,7 @@ public void testIndexSortScrollOptimization() throws Exception {
FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1];
context.setSearcher(newEarlyTerminationContextSearcher(reader, 10));
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertNull(context.queryResult().terminatedEarly());
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertThat(context.terminateAfter(), equalTo(0));
@@ -630,7 +634,8 @@ public void testDisableTopScoreCollection() throws Exception {
IndexWriterConfig iwc = newIndexWriterConfig(new StandardAnalyzer());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();
- for (int i = 0; i < 10; i++) {
+ final int numDocs = 2 * scaledRandomIntBetween(50, 450);
+ for (int i = 0; i < numDocs; i++) {
doc.clear();
if (i % 2 == 0) {
doc.add(new TextField("title", "foo bar", Store.NO));
@@ -653,16 +658,16 @@ public void testDisableTopScoreCollection() throws Exception {
context.trackTotalHitsUpTo(3);
TopDocsCollectorContext topDocsContext = TopDocsCollectorContext.createTopDocsCollectorContext(context, false);
assertEquals(topDocsContext.create(null).scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE);
- QueryPhase.executeInternal(context);
- assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value);
assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO);
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3));
context.sort(new SortAndFormats(new Sort(new SortField("other", SortField.Type.INT)), new DocValueFormat[] { DocValueFormat.RAW }));
topDocsContext = TopDocsCollectorContext.createTopDocsCollectorContext(context, false);
assertEquals(topDocsContext.create(null).scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS);
- QueryPhase.executeInternal(context);
- assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value);
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3));
assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
@@ -724,7 +729,7 @@ public void testEnhanceSortOnNumeric() throws Exception {
searchContext.parsedQuery(query);
searchContext.setTask(task);
searchContext.setSize(10);
- QueryPhase.executeInternal(searchContext);
+ QueryPhase.executeInternal(searchContext.withCleanQueryResult());
assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false);
}
@@ -736,7 +741,7 @@ public void testEnhanceSortOnNumeric() throws Exception {
searchContext.parsedQuery(query);
searchContext.setTask(task);
searchContext.setSize(10);
- QueryPhase.executeInternal(searchContext);
+ QueryPhase.executeInternal(searchContext.withCleanQueryResult());
assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, true);
}
@@ -748,7 +753,7 @@ public void testEnhanceSortOnNumeric() throws Exception {
searchContext.parsedQuery(query);
searchContext.setTask(task);
searchContext.setSize(10);
- QueryPhase.executeInternal(searchContext);
+ QueryPhase.executeInternal(searchContext.withCleanQueryResult());
assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false);
}
@@ -773,7 +778,7 @@ public void testEnhanceSortOnNumeric() throws Exception {
searchContext.setTask(task);
searchContext.from(5);
searchContext.setSize(0);
- QueryPhase.executeInternal(searchContext);
+ QueryPhase.executeInternal(searchContext.withCleanQueryResult());
assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false);
}
@@ -800,11 +805,15 @@ public void testEnhanceSortOnNumeric() throws Exception {
searchContext.parsedQuery(query);
searchContext.setTask(task);
searchContext.setSize(10);
- QueryPhase.executeInternal(searchContext);
+ QueryPhase.executeInternal(searchContext.withCleanQueryResult());
final TopDocs topDocs = searchContext.queryResult().topDocs().topDocs;
long topValue = (long) ((FieldDoc) topDocs.scoreDocs[0]).fields[0];
assertThat(topValue, greaterThan(afterValue));
assertSortResults(topDocs, (long) numDocs, false);
+
+ final TotalHits totalHits = topDocs.totalHits;
+ assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation);
+ assertEquals(numDocs, totalHits.value);
}
reader.close();
@@ -916,13 +925,133 @@ public void testMinScore() throws Exception {
context.setSize(1);
context.trackTotalHitsUpTo(5);
- QueryPhase.executeInternal(context);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value);
reader.close();
dir.close();
}
+ public void testMaxScore() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("filter", SortField.Type.STRING));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+ final int numDocs = scaledRandomIntBetween(600, 900);
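+ // Only doc 0 gets the exact term "f1"; later docs get "f1 <i>", so the SHOULD clause matches a single document while the sort keys stay unique.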
+ for (int i = 0; i < numDocs; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.NO));
+ doc.add(new StringField("filter", "f1" + ((i > 0) ? " " + Integer.toString(i) : ""), Store.NO));
+ doc.add(new SortedDocValuesField("filter", newBytesRef("f1" + ((i > 0) ? " " + Integer.toString(i) : ""))));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.trackScores(true);
+ context.parsedQuery(
+ new ParsedQuery(
+ new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.MUST)
+ .add(new TermQuery(new Term("filter", "f1")), Occur.SHOULD)
+ .build()
+ )
+ );
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(1);
+ context.trackTotalHitsUpTo(5);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(1, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, greaterThanOrEqualTo(6L));
+
+ context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(1, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, greaterThanOrEqualTo(6L));
+
+ context.trackScores(false);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertTrue(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(1, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, greaterThanOrEqualTo(6L));
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testCollapseQuerySearchResults() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("user", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+ // Always end up with uneven buckets so collapsing is predictable
+ final int numDocs = 2 * scaledRandomIntBetween(600, 900) - 1;
+ for (int i = 0; i < numDocs; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.NO));
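+ // i & 1 alternates users 0 and 1, so collapsing always yields exactly two groups.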
+ doc.add(new NumericDocValuesField("user", i & 1));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ QueryShardContext queryShardContext = mock(QueryShardContext.class);
+ when(queryShardContext.fieldMapper("user")).thenReturn(
+ new NumberFieldType("user", NumberType.INTEGER, true, false, true, false, null, Collections.emptyMap())
+ );
+
+ TestSearchContext context = new TestSearchContext(queryShardContext, indexShard, newContextSearcher(reader));
+ context.collapse(new CollapseBuilder("user").build(context.getQueryShardContext()));
+ context.trackScores(true);
+ context.parsedQuery(new ParsedQuery(new TermQuery(new Term("foo", "bar"))));
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(2);
+ context.trackTotalHitsUpTo(5);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(2, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs, instanceOf(CollapseTopFieldDocs.class));
+
+ CollapseTopFieldDocs topDocs = (CollapseTopFieldDocs) context.queryResult().topDocs().topDocs;
+ assertThat(topDocs.collapseValues.length, equalTo(2));
+ assertThat(topDocs.collapseValues[0], equalTo(0L)); // user == 0
+ assertThat(topDocs.collapseValues[1], equalTo(1L)); // user == 1
+
+ context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(2, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs, instanceOf(CollapseTopFieldDocs.class));
+
+ topDocs = (CollapseTopFieldDocs) context.queryResult().topDocs().topDocs;
+ assertThat(topDocs.collapseValues.length, equalTo(2));
+ assertThat(topDocs.collapseValues[0], equalTo(0L)); // user == 0
+ assertThat(topDocs.collapseValues[1], equalTo(1L)); // user == 1
+
+ context.trackScores(false);
+ QueryPhase.executeInternal(context.withCleanQueryResult());
+ assertTrue(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(2, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs, instanceOf(CollapseTopFieldDocs.class));
+
+ topDocs = (CollapseTopFieldDocs) context.queryResult().topDocs().topDocs;
+ assertThat(topDocs.collapseValues.length, equalTo(2));
+ assertThat(topDocs.collapseValues[0], equalTo(0L)); // user == 0
+ assertThat(topDocs.collapseValues[1], equalTo(1L)); // user == 1
+
+ reader.close();
+ dir.close();
+ }
+
public void testCancellationDuringPreprocess() throws IOException {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig())) {
@@ -982,7 +1111,8 @@ private static ContextIndexSearcher newContextSearcher(IndexReader reader) throw
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
);
}
@@ -992,7 +1122,8 @@ private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexRead
IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
- true
+ true,
+ null
) {
@Override
@@ -1003,6 +1134,32 @@ public void search(List<LeafReaderContext> leaves, Weight weight, Collector coll
};
}
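+ // Hands out the same TotalHitCountCollector from every newCollector() call so the
+ // accumulated total can be read back by the test; this relies on the test searcher
+ // collecting on a single thread.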
+ private static class TestTotalHitCountCollectorManager extends TotalHitCountCollectorManager {
+ private final TotalHitCountCollector collector;
+
+ static TestTotalHitCountCollectorManager create() {
+ return create(null);
+ }
+
+ static TestTotalHitCountCollectorManager create(final Sort sort) {
+ return new TestTotalHitCountCollectorManager(new TotalHitCountCollector(), sort);
+ }
+
+ private TestTotalHitCountCollectorManager(final TotalHitCountCollector collector, final Sort sort) {
+ super(sort);
+ this.collector = collector;
+ }
+
+ @Override
+ public TotalHitCountCollector newCollector() throws IOException {
+ return collector;
+ }
+
+ public int getTotalHits() {
+ return collector.getTotalHits();
+ }
+ }
+
private static class AssertingEarlyTerminationFilterCollector extends FilterCollector {
private final int size;
diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java
new file mode 100644
index 0000000000000..dfa41edb5cff2
--- /dev/null
+++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java
@@ -0,0 +1,1158 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.query;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.spans.SpanNearQuery;
+import org.apache.lucene.queries.spans.SpanTermQuery;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.FilterCollector;
+import org.apache.lucene.search.FilterLeafCollector;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.opensearch.action.search.SearchShardTask;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.xcontent.ToXContent;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.index.mapper.NumberFieldMapper.NumberFieldType;
+import org.opensearch.index.mapper.NumberFieldMapper.NumberType;
+import org.opensearch.index.query.ParsedQuery;
+import org.opensearch.index.query.QueryShardContext;
+import org.opensearch.index.shard.IndexShard;
+import org.opensearch.index.shard.IndexShardTestCase;
+import org.opensearch.lucene.queries.MinDocQuery;
+import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.collapse.CollapseBuilder;
+import org.opensearch.search.internal.ContextIndexSearcher;
+import org.opensearch.search.internal.ScrollContext;
+import org.opensearch.search.internal.SearchContext;
+import org.opensearch.search.profile.ProfileResult;
+import org.opensearch.search.profile.ProfileShardResult;
+import org.opensearch.search.profile.SearchProfileShardResults;
+import org.opensearch.search.profile.query.CollectorResult;
+import org.opensearch.search.profile.query.QueryProfileShardResult;
+import org.opensearch.search.sort.SortAndFormats;
+import org.opensearch.test.TestSearchContext;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Consumer;
+
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class QueryProfilePhaseTests extends IndexShardTestCase {
+
+ private IndexShard indexShard;
+
+ @Override
+ public Settings threadPoolSettings() {
+ return Settings.builder().put(super.threadPoolSettings()).put("thread_pool.search.min_queue_size", 10).build();
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ indexShard = newShard(true);
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ closeShards(indexShard);
+ }
+
+ public void testPostFilterDisablesCountOptimization() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ Document doc = new Document();
+ w.addDocument(doc);
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+
+ TestSearchContext context = new TestSearchContext(null, indexShard, newEarlyTerminationContextSearcher(reader, 0));
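+ // The asserting searcher fails if more than 0 docs are collected, so a passing run proves
+ // the hit count came from the count shortcut rather than from collection.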
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.setSearcher(newContextSearcher(reader));
+ context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery()));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertProfileData(context, collector -> {
+ assertThat(collector.getReason(), equalTo("search_post_filter"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_count"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("MatchNoDocsQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testTerminateAfterWithFilter() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ Document doc = new Document();
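+ // A single document carrying ten distinct "foo" terms: every per-term post filter below matches this same doc.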
+ for (int i = 0; i < 10; i++) {
+ doc.add(new StringField("foo", Integer.toString(i), Store.NO));
+ }
+ w.addDocument(doc);
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+ context.terminateAfter(1);
+ context.setSize(10);
+ for (int i = 0; i < 10; i++) {
+ context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", Integer.toString(i)))));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertProfileData(context, collector -> {
+ assertThat(collector.getReason(), equalTo("search_post_filter"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren().get(0).getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ });
+ }
+ reader.close();
+ dir.close();
+ }
+
+ public void testMinScoreDisablesCountOptimization() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ Document doc = new Document();
+ w.addDocument(doc);
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newEarlyTerminationContextSearcher(reader, 0));
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+ context.setSize(0);
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.minimumScore(100);
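+ // match_all scores a constant 1.0, so a min_score of 100 rejects the only doc and forces scoring, disabling the count optimization.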
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation);
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(100L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_min_score"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_count"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testInOrderScrollOptimization() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ final int numDocs = scaledRandomIntBetween(600, 900);
+ for (int i = 0; i < numDocs; ++i) {
+ w.addDocument(new Document());
+ }
+ w.close();
+ IndexReader reader = DirectoryReader.open(dir);
+ ScrollContext scrollContext = new ScrollContext();
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader), scrollContext);
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+ scrollContext.lastEmittedDoc = null;
+ scrollContext.maxScore = Float.NaN;
+ scrollContext.totalHits = null;
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ int size = randomIntBetween(2, 5);
+ context.setSize(size);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.terminateAfter(), equalTo(0));
+ assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.setSearcher(newEarlyTerminationContextSearcher(reader, size));
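+ // Second scroll page: the in-order scroll shortcut rewrites the query (profiled below as
+ // ConstantScoreQuery) and stops collecting after "size" hits.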
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.terminateAfter(), equalTo(size));
+ assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size));
+ assertProfileData(context, "ConstantScoreQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testTerminateAfterEarlyTermination() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = newIndexWriterConfig();
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ final int numDocs = scaledRandomIntBetween(600, 900);
+ for (int i = 0; i < numDocs; ++i) {
+ Document doc = new Document();
+ if (randomBoolean()) {
+ doc.add(new StringField("foo", "bar", Store.NO));
+ }
+ if (randomBoolean()) {
+ doc.add(new StringField("foo", "baz", Store.NO));
+ }
+ doc.add(new NumericDocValuesField("rank", numDocs - i));
+ w.addDocument(doc);
+ }
+ w.close();
+ final IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+
+ context.terminateAfter(1);
+ {
+ context.setSize(1);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+
+ context.setSize(0);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_count"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+ }
+
+ {
+ context.setSize(1);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+ }
+ {
+ context.setSize(1);
+ BooleanQuery bq = new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD)
+ .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD)
+ .build();
+ context.parsedQuery(new ParsedQuery(bq));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+ context.setSize(0);
+ context.parsedQuery(new ParsedQuery(bq));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0));
+
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), equalTo(0L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), equalTo(0L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_count"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+ }
+
+ context.terminateAfter(7);
+ context.setSize(10);
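+ // trackTotalHits == -1 disables counting (0 reported); any tracked value still reports the 7 docs seen before terminate_after kicks in.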
+ for (int trackTotalHits : new int[] { -1, 3, 75, 100 }) {
+ context.trackTotalHitsUpTo(trackTotalHits);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertTrue(context.queryResult().terminatedEarly());
+ if (trackTotalHits == -1) {
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L));
+ } else {
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L));
+ }
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7));
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(7L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThan(0L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThan(0L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_terminate_after_count"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+ }
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testIndexSortingEarlyTermination() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("rank", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ final int numDocs = scaledRandomIntBetween(600, 900);
+ for (int i = 0; i < numDocs; ++i) {
+ Document doc = new Document();
+ if (randomBoolean()) {
+ doc.add(new StringField("foo", "bar", Store.NO));
+ }
+ if (randomBoolean()) {
+ doc.add(new StringField("foo", "baz", Store.NO));
+ }
+ doc.add(new NumericDocValuesField("rank", numDocs - i));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ final IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+ context.setSize(1);
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
+ FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0];
+ assertThat(fieldDoc.fields[0], equalTo(1));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ {
+ context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1)));
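+ // MinDocQuery(1) matches every doc id >= 1, excluding only doc 0, so exactly one hit drops from the total.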
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
+ assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
+ assertProfileData(context, collector -> {
+ assertThat(collector.getReason(), equalTo("search_post_filter"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("MinDocQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, (query) -> {
+ assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery"));
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ });
+ context.parsedPostFilter(null);
+ }
+
+ {
+ context.setSearcher(newEarlyTerminationContextSearcher(reader, 1));
+ context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
+ assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1));
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class));
+ assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2)));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+ }
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testIndexSortScrollOptimization() throws Exception {
+ Directory dir = newDirectory();
+ final Sort indexSort = new Sort(new SortField("rank", SortField.Type.INT), new SortField("tiebreaker", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(indexSort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ final int numDocs = scaledRandomIntBetween(600, 900);
+ for (int i = 0; i < numDocs; ++i) {
+ Document doc = new Document();
+ doc.add(new NumericDocValuesField("rank", random().nextInt()));
+ doc.add(new NumericDocValuesField("tiebreaker", i));
+ w.addDocument(doc);
+ }
+ if (randomBoolean()) {
+ w.forceMerge(randomIntBetween(1, 10));
+ }
+ w.close();
+
+ final IndexReader reader = DirectoryReader.open(dir);
+ List<SortAndFormats> searchSortAndFormats = new ArrayList<>();
+ searchSortAndFormats.add(new SortAndFormats(indexSort, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }));
+ // search sort is a prefix of the index sort
+ searchSortAndFormats.add(new SortAndFormats(new Sort(indexSort.getSort()[0]), new DocValueFormat[] { DocValueFormat.RAW }));
+ for (SortAndFormats searchSortAndFormat : searchSortAndFormats) {
+ ScrollContext scrollContext = new ScrollContext();
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader), scrollContext);
+ context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
+ scrollContext.lastEmittedDoc = null;
+ scrollContext.maxScore = Float.NaN;
+ scrollContext.totalHits = null;
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(10);
+ context.sort(searchSortAndFormat);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.terminateAfter(), equalTo(0));
+ assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
+ assertProfileData(context, "MatchAllDocsQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1;
+ FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1];
+
+ context.setSearcher(newEarlyTerminationContextSearcher(reader, 10));
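+ // Second page: the index-sort optimization resumes after the last emitted doc, visible below as a SearchAfterSortedDocQuery child.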
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertNull(context.queryResult().terminatedEarly());
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.terminateAfter(), equalTo(0));
+ assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
+ assertProfileData(context, "ConstantScoreQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(1));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("SearchAfterSortedDocQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+ FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0];
+ for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) {
+ @SuppressWarnings("unchecked")
+ FieldComparator<Object> comparator = (FieldComparator<Object>) searchSortAndFormat.sort.getSort()[i].getComparator(
+ i,
+ false
+ );
+ int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]);
+ if (cmp == 0) {
+ continue;
+ }
+ assertThat(cmp, equalTo(1));
+ break;
+ }
+ }
+ reader.close();
+ dir.close();
+ }
+
+ public void testDisableTopScoreCollection() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = newIndexWriterConfig(new StandardAnalyzer());
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ Document doc = new Document();
+ final int numDocs = 2 * scaledRandomIntBetween(50, 450);
+ for (int i = 0; i < numDocs; i++) {
+ doc.clear();
+ if (i % 2 == 0) {
+ doc.add(new TextField("title", "foo bar", Store.NO));
+ } else {
+ doc.add(new TextField("title", "foo", Store.NO));
+ }
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ Query q = new SpanNearQuery.Builder("title", true).addClause(new SpanTermQuery(new Term("title", "foo")))
+ .addClause(new SpanTermQuery(new Term("title", "bar")))
+ .build();
+
+ context.parsedQuery(new ParsedQuery(q));
+ context.setSize(3);
+ context.trackTotalHitsUpTo(3);
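+ // Without a sort, scoring is required (ScoreMode.COMPLETE); the field sort applied below flips collection to TOP_DOCS and skips scoring.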
+ TopDocsCollectorContext topDocsContext = TopDocsCollectorContext.createTopDocsCollectorContext(context, false);
+ assertEquals(topDocsContext.create(null).scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO);
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3));
+ assertProfileData(context, "SpanNearQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.sort(new SortAndFormats(new Sort(new SortField("other", SortField.Type.INT)), new DocValueFormat[] { DocValueFormat.RAW }));
+ topDocsContext = TopDocsCollectorContext.createTopDocsCollectorContext(context, false);
+ assertEquals(topDocsContext.create(null).scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS);
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3));
+ assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+ assertProfileData(context, "SpanNearQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testMinScore() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = newIndexWriterConfig();
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+ for (int i = 0; i < 10; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.NO));
+ doc.add(new StringField("filter", "f1", Store.NO));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.parsedQuery(
+ new ParsedQuery(
+ new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.MUST)
+ .add(new TermQuery(new Term("filter", "f1")), Occur.SHOULD)
+ .build()
+ )
+ );
+ context.minimumScore(0.01f);
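+ // 0.01 is below every real score, so all 10 docs survive, but collection still runs through the min-score collector (reason: search_min_score).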
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(1);
+ context.trackTotalHitsUpTo(5);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value);
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), equalTo(10L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_min_score"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), hasSize(1));
+ assertThat(collector.getProfiledChildren().get(0).getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testMaxScore() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("filter", SortField.Type.STRING));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+ final int numDocs = scaledRandomIntBetween(600, 900);
+ for (int i = 0; i < numDocs; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.NO));
+ doc.add(new StringField("filter", "f1" + ((i > 0) ? " " + Integer.toString(i) : ""), Store.NO));
+ doc.add(new SortedDocValuesField("filter", newBytesRef("f1" + ((i > 0) ? " " + Integer.toString(i) : ""))));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader));
+ context.trackScores(true);
+ context.parsedQuery(
+ new ParsedQuery(
+ new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.MUST)
+ .add(new TermQuery(new Term("filter", "f1")), Occur.SHOULD)
+ .build()
+ )
+ );
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(1);
+ context.trackTotalHitsUpTo(5);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(1, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, greaterThanOrEqualTo(6L));
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(1, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, greaterThanOrEqualTo(6L));
+ assertProfileData(context, "BooleanQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren(), hasSize(2));
+ assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+
+ assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery"));
+ assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ public void testCollapseQuerySearchResults() throws Exception {
+ Directory dir = newDirectory();
+ final Sort sort = new Sort(new SortField("user", SortField.Type.INT));
+ IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+ // Always end up with uneven buckets so collapsing is predictable
+ final int numDocs = 2 * scaledRandomIntBetween(600, 900) - 1;
+ for (int i = 0; i < numDocs; i++) {
+ Document doc = new Document();
+ doc.add(new StringField("foo", "bar", Store.NO));
+ doc.add(new NumericDocValuesField("user", i & 1));
+ w.addDocument(doc);
+ }
+ w.close();
+
+ IndexReader reader = DirectoryReader.open(dir);
+ QueryShardContext queryShardContext = mock(QueryShardContext.class);
+ when(queryShardContext.fieldMapper("user")).thenReturn(
+ new NumberFieldType("user", NumberType.INTEGER, true, false, true, false, null, Collections.emptyMap())
+ );
+
+ TestSearchContext context = new TestSearchContext(queryShardContext, indexShard, newContextSearcher(reader));
+ context.collapse(new CollapseBuilder("user").build(context.getQueryShardContext()));
+ context.trackScores(true);
+ context.parsedQuery(new ParsedQuery(new TermQuery(new Term("foo", "bar"))));
+ context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
+ context.setSize(2);
+ context.trackTotalHitsUpTo(5);
+
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(2, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs, instanceOf(CollapseTopFieldDocs.class));
+
+ assertProfileData(context, "TermQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren(), empty());
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW }));
+ QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers());
+ assertFalse(Float.isNaN(context.queryResult().getMaxScore()));
+ assertEquals(2, context.queryResult().topDocs().topDocs.scoreDocs.length);
+ assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
+ assertThat(context.queryResult().topDocs().topDocs, instanceOf(CollapseTopFieldDocs.class));
+
+ assertProfileData(context, "TermQuery", query -> {
+ assertThat(query.getTimeBreakdown().keySet(), not(empty()));
+ assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L));
+ assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L));
+ assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
+ assertThat(query.getProfiledChildren(), empty());
+ }, collector -> {
+ assertThat(collector.getReason(), equalTo("search_top_hits"));
+ assertThat(collector.getTime(), greaterThan(0L));
+ assertThat(collector.getProfiledChildren(), empty());
+ });
+
+ reader.close();
+ dir.close();
+ }
+
+ private void assertProfileData(SearchContext context, String type, Consumer<ProfileResult> query, Consumer<CollectorResult> collector)
+ throws IOException {
+ assertProfileData(context, collector, (profileResult) -> {
+ assertThat(profileResult.getQueryName(), equalTo(type));
+ assertThat(profileResult.getTime(), greaterThan(0L));
+ query.accept(profileResult);
+ });
+ }
+
+ private void assertProfileData(SearchContext context, Consumer<CollectorResult> collector, Consumer<ProfileResult> query1)
+ throws IOException {
+ assertProfileData(context, Arrays.asList(query1), collector, false);
+ }
+
+ private void assertProfileData(
+ SearchContext context,
+ Consumer<CollectorResult> collector,
+ Consumer<ProfileResult> query1,
+ Consumer<ProfileResult> query2
+ ) throws IOException {
+ assertProfileData(context, Arrays.asList(query1, query2), collector, false);
+ }
+
+ private final void assertProfileData(
+ SearchContext context,
+ List<Consumer<ProfileResult>> queries,
+ Consumer<CollectorResult> collector,
+ boolean debug
+ ) throws IOException {
+ assertThat(context.getProfilers(), not(nullValue()));
+
+ final ProfileShardResult result = SearchProfileShardResults.buildShardResults(context.getProfilers(), null);
+ if (debug) {
+ final SearchProfileShardResults results = new SearchProfileShardResults(
+ Collections.singletonMap(indexShard.shardId().toString(), result)
+ );
+
+ try (final XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint()) {
+ builder.startObject();
+ results.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ builder.endObject();
+ builder.flush();
+
+ final OutputStream out = builder.getOutputStream();
+ assertThat(out, instanceOf(ByteArrayOutputStream.class));
+
+ logger.info(new String(((ByteArrayOutputStream) out).toByteArray(), StandardCharsets.UTF_8));
+ }
+ }
+
+ assertThat(result.getQueryProfileResults(), hasSize(1));
+
+ final QueryProfileShardResult queryProfileShardResult = result.getQueryProfileResults().get(0);
+ assertThat(queryProfileShardResult.getQueryResults(), hasSize(queries.size()));
+
+ for (int i = 0; i < queries.size(); ++i) {
+ queries.get(i).accept(queryProfileShardResult.getQueryResults().get(i));
+ }
+
+ collector.accept(queryProfileShardResult.getCollectorResult());
+ }
+
+ private static ContextIndexSearcher newContextSearcher(IndexReader reader) throws IOException {
+ return new ContextIndexSearcher(
+ reader,
+ IndexSearcher.getDefaultSimilarity(),
+ IndexSearcher.getDefaultQueryCache(),
+ IndexSearcher.getDefaultQueryCachingPolicy(),
+ true,
+ null
+ );
+ }
+
+ private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexReader reader, int size) throws IOException {
+ return new ContextIndexSearcher(
+ reader,
+ IndexSearcher.getDefaultSimilarity(),
+ IndexSearcher.getDefaultQueryCache(),
+ IndexSearcher.getDefaultQueryCachingPolicy(),
+ true,
+ null
+ ) {
+
+ @Override
+ public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
+ final Collector in = new AssertingEarlyTerminationFilterCollector(collector, size);
+ super.search(leaves, weight, in);
+ }
+ };
+ }
+
+ private static class AssertingEarlyTerminationFilterCollector extends FilterCollector {
+ private final int size;
+
+ AssertingEarlyTerminationFilterCollector(Collector in, int size) {
+ super(in);
+ this.size = size;
+ }
+
+ @Override
+ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+ final LeafCollector in = super.getLeafCollector(context);
+ return new FilterLeafCollector(in) {
+ int collected;
+
+ @Override
+ public void collect(int doc) throws IOException {
+ assert collected <= size : "should not collect more than " + size + " doc per segment, got " + collected;
+ ++collected;
+ super.collect(doc);
+ }
+ };
+ }
+ }
+}
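The profile assertions above follow one pattern: each expected node of the profile tree gets its own Consumer, and the helper applies them in order against the children it finds. A minimal standalone sketch of that pattern, using hypothetical stand-in types rather than OpenSearch's ProfileResult/CollectorResult:

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

final class ProfileNode {
    final String name;
    final long timeNanos;
    final List<ProfileNode> children;

    ProfileNode(String name, long timeNanos, ProfileNode... children) {
        this.name = name;
        this.timeNanos = timeNanos;
        this.children = Arrays.asList(children);
    }

    // Apply one consumer per expected child, in order; the same shape as
    // assertProfileData(context, Arrays.asList(query1, query2), collector, false).
    @SafeVarargs
    final void assertChildren(Consumer<ProfileNode>... checks) {
        if (children.size() != checks.length) {
            throw new AssertionError("expected " + checks.length + " children, got " + children.size());
        }
        for (int i = 0; i < checks.length; i++) {
            checks[i].accept(children.get(i));
        }
    }
}

Keeping the expectations in lambdas, as the tests do, means each call site reads as a literal description of the expected profile tree.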
diff --git a/server/src/test/java/org/opensearch/tasks/CancelTasksResponseTests.java b/server/src/test/java/org/opensearch/tasks/CancelTasksResponseTests.java
index 64d2979c2c5a0..c0ec4ca3d31fd 100644
--- a/server/src/test/java/org/opensearch/tasks/CancelTasksResponseTests.java
+++ b/server/src/test/java/org/opensearch/tasks/CancelTasksResponseTests.java
@@ -62,7 +62,7 @@ protected CancelTasksResponse createTestInstance() {
private static List<TaskInfo> randomTasks() {
List<TaskInfo> randomTasks = new ArrayList<>();
for (int i = 0; i < randomInt(10); i++) {
- randomTasks.add(TaskInfoTests.randomTaskInfo());
+ randomTasks.add(TaskInfoTests.randomTaskInfo(false));
}
return randomTasks;
}
diff --git a/server/src/test/java/org/opensearch/tasks/ListTasksResponseTests.java b/server/src/test/java/org/opensearch/tasks/ListTasksResponseTests.java
index 4d5feb46de1d0..0201509d03a2b 100644
--- a/server/src/test/java/org/opensearch/tasks/ListTasksResponseTests.java
+++ b/server/src/test/java/org/opensearch/tasks/ListTasksResponseTests.java
@@ -45,6 +45,7 @@
import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.function.Predicate;
import java.util.function.Supplier;
@@ -72,7 +73,12 @@ public void testNonEmptyToString() {
true,
false,
new TaskId("node1", 0),
- Collections.singletonMap("foo", "bar")
+ Collections.singletonMap("foo", "bar"),
+ new TaskResourceStats(new HashMap<String, TaskResourceUsage>() {
+ {
+ put("dummy-type1", new TaskResourceUsage(100, 100));
+ }
+ })
);
ListTasksResponse tasksResponse = new ListTasksResponse(singletonList(info), emptyList(), emptyList());
assertEquals(
@@ -93,6 +99,12 @@ public void testNonEmptyToString() {
+ " \"parent_task_id\" : \"node1:0\",\n"
+ " \"headers\" : {\n"
+ " \"foo\" : \"bar\"\n"
+ + " },\n"
+ + " \"resource_stats\" : {\n"
+ + " \"dummy-type1\" : {\n"
+ + " \"cpu_time_in_nanos\" : 100,\n"
+ + " \"memory_in_bytes\" : 100\n"
+ + " }\n"
+ " }\n"
+ " }\n"
+ " ]\n"
@@ -127,8 +139,8 @@ protected boolean supportsUnknownFields() {
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
- // status and headers hold arbitrary content, we can't inject random fields in them
- return field -> field.endsWith("status") || field.endsWith("headers");
+ // status, headers and resource_stats hold arbitrary content, we can't inject random fields in them
+ return field -> field.endsWith("status") || field.endsWith("headers") || field.contains("resource_stats");
}
@Override
diff --git a/server/src/test/java/org/opensearch/tasks/TaskInfoTests.java b/server/src/test/java/org/opensearch/tasks/TaskInfoTests.java
index 89b690d81a4ea..7c8cb3230659b 100644
--- a/server/src/test/java/org/opensearch/tasks/TaskInfoTests.java
+++ b/server/src/test/java/org/opensearch/tasks/TaskInfoTests.java
@@ -77,13 +77,13 @@ protected boolean supportsUnknownFields() {
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
- // status and headers hold arbitrary content, we can't inject random fields in them
- return field -> "status".equals(field) || "headers".equals(field);
+ // status, headers and resource_stats hold arbitrary content, we can't inject random fields in them
+ return field -> "status".equals(field) || "headers".equals(field) || field.contains("resource_stats");
}
@Override
protected TaskInfo mutateInstance(TaskInfo info) {
- switch (between(0, 9)) {
+ switch (between(0, 10)) {
case 0:
TaskId taskId = new TaskId(info.getTaskId().getNodeId() + randomAlphaOfLength(5), info.getTaskId().getId());
return new TaskInfo(
@@ -97,7 +97,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 1:
return new TaskInfo(
@@ -111,7 +112,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 2:
return new TaskInfo(
@@ -125,7 +127,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 3:
return new TaskInfo(
@@ -139,7 +142,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 4:
Task.Status newStatus = randomValueOtherThan(info.getStatus(), TaskInfoTests::randomRawTaskStatus);
@@ -154,7 +158,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 5:
return new TaskInfo(
@@ -168,7 +173,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 6:
return new TaskInfo(
@@ -182,7 +188,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 7:
return new TaskInfo(
@@ -196,7 +203,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable() == false,
false,
info.getParentTaskId(),
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 8:
TaskId parentId = new TaskId(info.getParentTaskId().getNodeId() + randomAlphaOfLength(5), info.getParentTaskId().getId());
@@ -211,7 +219,8 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
parentId,
- info.getHeaders()
+ info.getHeaders(),
+ info.getResourceStats()
);
case 9:
Map<String, String> headers = info.getHeaders();
@@ -232,7 +241,30 @@ protected TaskInfo mutateInstance(TaskInfo info) {
info.isCancellable(),
info.isCancelled(),
info.getParentTaskId(),
- headers
+ headers,
+ info.getResourceStats()
+ );
+ case 10:
+ Map<String, TaskResourceUsage> resourceUsageMap;
+ if (info.getResourceStats() == null) {
+ resourceUsageMap = new HashMap<>(1);
+ } else {
+ resourceUsageMap = new HashMap<>(info.getResourceStats().getResourceUsageInfo());
+ }
+ resourceUsageMap.put(randomAlphaOfLength(5), new TaskResourceUsage(randomNonNegativeLong(), randomNonNegativeLong()));
+ return new TaskInfo(
+ info.getTaskId(),
+ info.getType(),
+ info.getAction(),
+ info.getDescription(),
+ info.getStatus(),
+ info.getStartTime(),
+ info.getRunningTimeNanos(),
+ info.isCancellable(),
+ info.isCancelled(),
+ info.getParentTaskId(),
+ info.getHeaders(),
+ new TaskResourceStats(resourceUsageMap)
);
default:
throw new IllegalStateException();
@@ -240,11 +272,15 @@ protected TaskInfo mutateInstance(TaskInfo info) {
}
static TaskInfo randomTaskInfo() {
+ return randomTaskInfo(randomBoolean());
+ }
+
+ static TaskInfo randomTaskInfo(boolean detailed) {
TaskId taskId = randomTaskId();
String type = randomAlphaOfLength(5);
String action = randomAlphaOfLength(5);
- Task.Status status = randomBoolean() ? randomRawTaskStatus() : null;
- String description = randomBoolean() ? randomAlphaOfLength(5) : null;
+ Task.Status status = detailed ? randomRawTaskStatus() : null;
+ String description = detailed ? randomAlphaOfLength(5) : null;
long startTime = randomLong();
long runningTimeNanos = randomLong();
boolean cancellable = randomBoolean();
@@ -264,7 +300,8 @@ static TaskInfo randomTaskInfo() {
cancellable,
cancelled,
parentTaskId,
- headers
+ headers,
+ randomResourceStats(detailed)
);
}
@@ -285,4 +322,14 @@ private static RawTaskStatus randomRawTaskStatus() {
throw new IllegalStateException(e);
}
}
+
+ public static TaskResourceStats randomResourceStats(boolean detailed) {
+ return detailed ? new TaskResourceStats(new HashMap<String, TaskResourceUsage>() {
+ {
+ for (int i = 0; i < randomInt(5); i++) {
+ put(randomAlphaOfLength(5), new TaskResourceUsage(randomNonNegativeLong(), randomNonNegativeLong()));
+ }
+ }
+ }) : null;
+ }
}
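Case 10 above mutates the resource stats by copying the existing usage map (or starting empty when the instance had none) and adding one random entry, which guarantees the mutated TaskInfo differs from the original. The copy-and-extend step in isolation, sketched over a plain map with hypothetical names:

import java.util.HashMap;
import java.util.Map;

final class ResourceStatsMutation {
    // Return a copy of the usage map with one extra entry; the original map is
    // never touched, so the pre-mutation instance stays intact for equality checks.
    static Map<String, Long> withExtraEntry(Map<String, Long> existing, String name, long cpuNanos) {
        Map<String, Long> copy = existing == null ? new HashMap<>(1) : new HashMap<>(existing);
        copy.put(name, cpuNanos);
        return copy;
    }
}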
diff --git a/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java b/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java
index 9c78d039984e1..bcc7fe0ccac94 100644
--- a/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java
+++ b/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java
@@ -32,7 +32,6 @@
package org.opensearch.transport;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.bytes.ReleasableBytesReference;
@@ -135,63 +134,18 @@ public void testDecode() throws IOException {
assertEquals(InboundDecoder.END_CONTENT, endMarker);
}
- public void testDecodePreHeaderSizeVariableInt() throws IOException {
- // TODO: Can delete test on 9.0
- boolean isCompressed = randomBoolean();
- String action = "test-request";
- long requestId = randomNonNegativeLong();
- final Version preHeaderVariableInt = LegacyESVersion.V_7_5_0;
- final String contentValue = randomAlphaOfLength(100);
- final OutboundMessage message = new OutboundMessage.Request(
- threadContext,
- new String[0],
- new TestRequest(contentValue),
- preHeaderVariableInt,
- action,
- requestId,
- true,
- isCompressed
- );
-
- final BytesReference totalBytes = message.serialize(new BytesStreamOutput());
- int partialHeaderSize = TcpHeader.headerSize(preHeaderVariableInt);
-
- InboundDecoder decoder = new InboundDecoder(Version.CURRENT, PageCacheRecycler.NON_RECYCLING_INSTANCE);
- final ArrayList<Object> fragments = new ArrayList<>();
- final ReleasableBytesReference releasable1 = ReleasableBytesReference.wrap(totalBytes);
- int bytesConsumed = decoder.decode(releasable1, fragments::add);
- assertEquals(partialHeaderSize, bytesConsumed);
- assertEquals(1, releasable1.refCount());
-
- final Header header = (Header) fragments.get(0);
- assertEquals(requestId, header.getRequestId());
- assertEquals(preHeaderVariableInt, header.getVersion());
- assertEquals(isCompressed, header.isCompressed());
- assertTrue(header.isHandshake());
- assertTrue(header.isRequest());
- assertTrue(header.needsToReadVariableHeader());
- fragments.clear();
-
- final BytesReference bytes2 = totalBytes.slice(bytesConsumed, totalBytes.length() - bytesConsumed);
- final ReleasableBytesReference releasable2 = ReleasableBytesReference.wrap(bytes2);
- int bytesConsumed2 = decoder.decode(releasable2, fragments::add);
- assertEquals(2, fragments.size());
- assertEquals(InboundDecoder.END_CONTENT, fragments.get(fragments.size() - 1));
- assertEquals(totalBytes.length() - bytesConsumed, bytesConsumed2);
- }
-
public void testDecodeHandshakeCompatibility() throws IOException {
String action = "test-request";
long requestId = randomNonNegativeLong();
final String headerKey = randomAlphaOfLength(10);
final String headerValue = randomAlphaOfLength(20);
threadContext.putHeader(headerKey, headerValue);
- Version handshakeCompat = Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion();
+ Version handshakeCompatVersion = Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion();
OutboundMessage message = new OutboundMessage.Request(
threadContext,
new String[0],
new TestRequest(randomAlphaOfLength(100)),
- handshakeCompat,
+ handshakeCompatVersion,
action,
requestId,
true,
@@ -199,7 +153,10 @@ public void testDecodeHandshakeCompatibility() throws IOException {
);
final BytesReference bytes = message.serialize(new BytesStreamOutput());
- int totalHeaderSize = TcpHeader.headerSize(handshakeCompat);
+ int totalHeaderSize = TcpHeader.headerSize(handshakeCompatVersion);
+ if (handshakeCompatVersion.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
+ totalHeaderSize += bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
+ }
InboundDecoder decoder = new InboundDecoder(Version.CURRENT, PageCacheRecycler.NON_RECYCLING_INSTANCE);
final ArrayList<Object> fragments = new ArrayList<>();
@@ -210,12 +167,10 @@ public void testDecodeHandshakeCompatibility() throws IOException {
final Header header = (Header) fragments.get(0);
assertEquals(requestId, header.getRequestId());
- assertEquals(handshakeCompat, header.getVersion());
+ assertEquals(handshakeCompatVersion, header.getVersion());
assertFalse(header.isCompressed());
assertTrue(header.isHandshake());
assertTrue(header.isRequest());
- // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int
- assertTrue(header.needsToReadVariableHeader());
fragments.clear();
}
@@ -306,12 +261,12 @@ public void testCompressedDecodeHandshakeCompatibility() throws IOException {
final String headerKey = randomAlphaOfLength(10);
final String headerValue = randomAlphaOfLength(20);
threadContext.putHeader(headerKey, headerValue);
- Version handshakeCompat = Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion();
+ Version handshakeCompatVersion = Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion();
OutboundMessage message = new OutboundMessage.Request(
threadContext,
new String[0],
new TestRequest(randomAlphaOfLength(100)),
- handshakeCompat,
+ handshakeCompatVersion,
action,
requestId,
true,
@@ -319,7 +274,10 @@ public void testCompressedDecodeHandshakeCompatibility() throws IOException {
);
final BytesReference bytes = message.serialize(new BytesStreamOutput());
- int totalHeaderSize = TcpHeader.headerSize(handshakeCompat);
+ int totalHeaderSize = TcpHeader.headerSize(handshakeCompatVersion);
+ if (handshakeCompatVersion.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
+ totalHeaderSize += bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
+ }
InboundDecoder decoder = new InboundDecoder(Version.CURRENT, PageCacheRecycler.NON_RECYCLING_INSTANCE);
final ArrayList<Object> fragments = new ArrayList<>();
@@ -330,12 +288,10 @@ public void testCompressedDecodeHandshakeCompatibility() throws IOException {
final Header header = (Header) fragments.get(0);
assertEquals(requestId, header.getRequestId());
- assertEquals(handshakeCompat, header.getVersion());
+ assertEquals(handshakeCompatVersion, header.getVersion());
assertTrue(header.isCompressed());
assertTrue(header.isHandshake());
assertTrue(header.isRequest());
- // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int
- assertTrue(header.needsToReadVariableHeader());
fragments.clear();
}
@@ -372,25 +328,25 @@ public void testEnsureVersionCompatibility() throws IOException {
);
assertNull(ise);
- final Version version = Version.fromString("7.0.0");
- ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("6.0.0"), version, true);
+ final Version version = Version.V_3_0_0;
+ ise = InboundDecoder.ensureVersionCompatibility(Version.V_2_0_0, version, true);
assertNull(ise);
- ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("6.0.0"), version, false);
+ ise = InboundDecoder.ensureVersionCompatibility(Version.V_1_0_0, version, false);
assertEquals(
- "Received message from unsupported version: [6.0.0] minimal compatible version is: ["
+ "Received message from unsupported version: [1.0.0] minimal compatible version is: ["
+ version.minimumCompatibilityVersion()
+ "]",
ise.getMessage()
);
// For handshake we are compatible with N-2
- ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("6.8.0"), version, true);
+ ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("2.1.0"), version, true);
assertNull(ise);
- ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("5.6.0"), version, false);
+ ise = InboundDecoder.ensureVersionCompatibility(Version.fromString("1.3.0"), version, false);
assertEquals(
- "Received message from unsupported version: [5.6.0] minimal compatible version is: ["
+ "Received message from unsupported version: [1.3.0] minimal compatible version is: ["
+ version.minimumCompatibilityVersion()
+ "]",
ise.getMessage()
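With the pre-header-size message format gone, both handshake tests now compute the full header length as the fixed header plus the variable-header int encoded in the serialized bytes. Reading a 4-byte big-endian length at a fixed offset, roughly what the getInt call above does, can be sketched with a plain ByteBuffer (the offset constant here is hypothetical; the tests use TcpHeader.VARIABLE_HEADER_SIZE_POSITION):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class HeaderPeek {
    // Hypothetical offset of the variable-header length within the fixed header.
    static final int VARIABLE_HEADER_SIZE_POSITION = 23;

    // Read the 4-byte big-endian (network order) variable-header length.
    static int variableHeaderSize(byte[] serialized) {
        return ByteBuffer.wrap(serialized).order(ByteOrder.BIG_ENDIAN).getInt(VARIABLE_HEADER_SIZE_POSITION);
    }
}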
diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
index f12a4579a2d0c..4879425b7bcd6 100644
--- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
+++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
@@ -150,12 +150,16 @@ public void handle(final HttpExchange exchange) throws IOException {
}
final int start = Integer.parseInt(matcher.group(1));
- final int length = Integer.parseInt(matcher.group(2)) - start + 1;
+ final int end = Integer.parseInt(matcher.group(2));
+ final int length = Math.min(end - start + 1, blob.length());
exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
exchange.getResponseHeaders().add("Content-Length", String.valueOf(length));
exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob");
exchange.getResponseHeaders().add("x-ms-request-server-encrypted", "false");
+ exchange.getResponseHeaders()
+ .add("Content-Range", "bytes " + start + "-" + Math.min(end, length) + "/" + blob.length());
+
exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length);
exchange.getResponseBody().write(blob.toBytesRef().bytes, start, length);
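The fixture change clamps the range length against the blob size and advertises the served range back to the client. Its arithmetic, pulled out into a standalone sketch that mirrors the computation above (not the full RFC 7233 semantics):

final class RangeMath {
    // Number of bytes actually served for an inclusive range [start, end],
    // capped at the blob size, as the fixture computes it.
    static int clampedLength(int start, int end, int blobLength) {
        return Math.min(end - start + 1, blobLength);
    }

    // Content-Range header value matching the fixture's response.
    static String contentRange(int start, int end, int blobLength) {
        int last = Math.min(end, clampedLength(start, end, blobLength));
        return "bytes " + start + "-" + last + "/" + blobLength;
    }
}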
diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle
index d1040acd03aa7..c56cc6d196b63 100644
--- a/test/fixtures/hdfs-fixture/build.gradle
+++ b/test/fixtures/hdfs-fixture/build.gradle
@@ -41,6 +41,6 @@ dependencies {
api 'com.google.code.gson:gson:2.9.0'
api "org.bouncycastle:bcpkix-jdk15on:${versions.bouncycastle}"
api "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}"
- api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+ api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api 'net.minidev:json-smart:2.4.8'
}
diff --git a/test/framework/build.gradle b/test/framework/build.gradle
index 42197bf5e2980..096e8c1e58243 100644
--- a/test/framework/build.gradle
+++ b/test/framework/build.gradle
@@ -71,7 +71,6 @@ thirdPartyAudit.ignoreMissingClasses(
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
- 'org.apache.log4j.Category',
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
diff --git a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java
index 9841daa5f81b7..6617102c12ffc 100644
--- a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java
@@ -321,7 +321,7 @@ class Cluster implements Releasable {
);
logger.info(
- "--> creating cluster of {} nodes (master-eligible nodes: {}) with initial configuration {}",
+ "--> creating cluster of {} nodes (cluster-manager-eligible nodes: {}) with initial configuration {}",
initialNodeCount,
masterEligibleNodeIds,
initialConfiguration
diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java
index 38a0253305833..832328cb0242f 100644
--- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java
@@ -334,7 +334,8 @@ public boolean shouldCache(Query query) {
indexSearcher.getSimilarity(),
queryCache,
queryCachingPolicy,
- false
+ false,
+ null
);
SearchContext searchContext = mock(SearchContext.class);
diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java
index ae32db5fe6032..b29d6a26054ff 100644
--- a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java
@@ -376,7 +376,7 @@ private static class ServiceHolder implements Closeable {
() -> { throw new AssertionError("node.name must be set"); }
);
PluginsService pluginsService;
- pluginsService = new PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins);
+ pluginsService = new PluginsService(nodeSettings, null, env.modulesDir(), env.pluginsDir(), plugins);
client = (Client) Proxy.newProxyInstance(Client.class.getClassLoader(), new Class<?>[] { Client.class }, clientInvocationHandler);
ScriptModule scriptModule = createScriptModule(pluginsService.filterPlugins(ScriptPlugin.class));
diff --git a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java
index 3a28ec2efdd4b..9e3a2c3564a00 100644
--- a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java
@@ -1121,7 +1121,7 @@ private synchronized void reset(boolean wipeData) throws IOException {
}
assertTrue(
- "expected at least one master-eligible node left in " + nodes,
+ "expected at least one cluster-manager-eligible node left in " + nodes,
nodes.isEmpty() || nodes.values().stream().anyMatch(NodeAndClient::isMasterEligible)
);
@@ -1740,7 +1740,7 @@ private void rebuildUnicastHostFiles(List<NodeAndClient> newNodes) {
.distinct()
.collect(Collectors.toList());
Set<Path> configPaths = Stream.concat(currentNodes.stream(), newNodes.stream())
- .map(nac -> nac.node.getEnvironment().configFile())
+ .map(nac -> nac.node.getEnvironment().configDir())
.collect(Collectors.toSet());
logger.debug("configuring discovery with {} at {}", discoveryFileContents, configPaths);
for (final Path configPath : configPaths) {
@@ -1848,7 +1848,8 @@ private void restartNode(NodeAndClient nodeAndClient, RestartCallback callback)
publishNode(nodeAndClient);
if (callback.validateClusterForming() || excludedNodeIds.isEmpty() == false) {
- // we have to validate cluster size to ensure that the restarted node has rejoined the cluster if it was master-eligible;
+ // we have to validate cluster size to ensure that the restarted node has rejoined the cluster if it was
+ // cluster-manager-eligible;
validateClusterFormed();
}
}
@@ -1999,7 +2000,7 @@ public synchronized Set<String> nodesInclude(String index) {
/**
* Performs cluster bootstrap when node with index {@link #bootstrapMasterNodeIndex} is started
- * with the names of all existing and new master-eligible nodes.
+ * with the names of all existing and new cluster-manager-eligible nodes.
* Indexing starts from 0.
* If {@link #bootstrapMasterNodeIndex} is -1 (default), this method does nothing.
*/
diff --git a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
index 0e91332892a55..0b2235a0afedd 100644
--- a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
@@ -32,6 +32,7 @@
package org.opensearch.test;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.opensearch.action.OriginalIndices;
@@ -70,6 +71,7 @@
import org.opensearch.search.internal.ShardSearchRequest;
import org.opensearch.search.profile.Profilers;
import org.opensearch.search.query.QuerySearchResult;
+import org.opensearch.search.query.ReduceableSearchResult;
import org.opensearch.search.rescore.RescoreContext;
import org.opensearch.search.sort.SortAndFormats;
import org.opensearch.search.suggest.SuggestionSearchContext;
@@ -90,7 +92,7 @@ public class TestSearchContext extends SearchContext {
final BigArrays bigArrays;
final IndexService indexService;
final BitsetFilterCache fixedBitSetFilterCache;
- final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
+ final Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers = new HashMap<>();
final IndexShard indexShard;
final QuerySearchResult queryResult = new QuerySearchResult();
final QueryShardContext queryShardContext;
@@ -110,7 +112,9 @@ public class TestSearchContext extends SearchContext {
private SearchContextAggregations aggregations;
private ScrollContext scrollContext;
private FieldDoc searchAfter;
- private final long originNanoTime = System.nanoTime();
+ private Profilers profilers;
+ private CollapseContext collapse;
+
private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
public TestSearchContext(BigArrays bigArrays, IndexService indexService) {
@@ -405,12 +409,13 @@ public FieldDoc searchAfter() {
@Override
public SearchContext collapse(CollapseContext collapse) {
- return null;
+ this.collapse = collapse;
+ return this;
}
@Override
public CollapseContext collapse() {
- return null;
+ return collapse;
}
@Override
@@ -596,12 +601,12 @@ public long getRelativeTimeInMillis() {
@Override
public Profilers getProfilers() {
- return null; // no profiling
+ return profilers;
}
@Override
- public Map<Class<?>, Collector> queryCollectors() {
- return queryCollectors;
+ public Map<Class<?>, CollectorManager<? extends Collector, ReduceableSearchResult>> queryCollectorManagers() {
+ return queryCollectorManagers;
}
@Override
@@ -633,4 +638,21 @@ public void addRescore(RescoreContext rescore) {
public ReaderContext readerContext() {
throw new UnsupportedOperationException();
}
+
+ /**
+ * Clean the query results by consuming all of it
+ */
+ public TestSearchContext withCleanQueryResult() {
+ queryResult.consumeAll();
+ profilers = null;
+ return this;
+ }
+
+ /**
+ * Add profilers to the query
+ */
+ public TestSearchContext withProfilers() {
+ this.profilers = new Profilers(searcher);
+ return this;
+ }
}
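Both new helpers return this, so a test can reset state and attach profilers in one chained expression, as in QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers()). The fluent-mutator shape, reduced to a sketch with hypothetical names:

final class FluentTestContext {
    private boolean profiling;

    // Drop any state left over from a previous execution, then allow chaining.
    FluentTestContext withCleanState() {
        profiling = false;
        return this;
    }

    // Turn profiling on for the next execution, then allow chaining.
    FluentTestContext withProfiling() {
        profiling = true;
        return this;
    }

    boolean isProfiling() {
        return profiling;
    }
}

A call like new FluentTestContext().withCleanState().withProfiling() mirrors the chaining the tests above rely on.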
diff --git a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java
index 1e6807189f947..5989dfa7898fd 100644
--- a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java
+++ b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java
@@ -89,7 +89,7 @@ static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version curre
stableVersions = previousMajor;
// remove current
moveLastToUnreleased(currentMajor, unreleasedVersions);
- } else if (current.major != 1 && current.major != 2) {
+ } else if (current.major != 1) {
// on a stable or release branch, ie N.x
stableVersions = currentMajor;
// remove the next maintenance bugfix
@@ -108,9 +108,11 @@ static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version curre
List<Version> lastMinorLine = stableVersions.get(stableVersions.size() - 1);
if (lastMinorLine.get(lastMinorLine.size() - 1) instanceof LegacyESVersion == false) {
// if the last minor line is Legacy there are no more staged releases; do nothing
+ // otherwise the last minor line is (by definition) staged and unreleased
Version lastMinor = moveLastToUnreleased(stableVersions, unreleasedVersions);
+ // no more staged legacy bugfixes, so skip
if (lastMinor instanceof LegacyESVersion == false && lastMinor.revision == 0) {
- // no more staged legacy versions
+ // this is not a legacy version; remove the staged bugfix
if (stableVersions.get(stableVersions.size() - 1).size() == 1) {
// a minor is being staged, which is also unreleased
moveLastToUnreleased(stableVersions, unreleasedVersions);
@@ -210,11 +212,11 @@ public static List<Version> allLegacyVersions() {
}
/**
- * Get the released version before {@code version}.
+ * Get the version before {@code version}.
*/
public static Version getPreviousVersion(Version version) {
- for (int i = RELEASED_VERSIONS.size() - 1; i >= 0; i--) {
- Version v = RELEASED_VERSIONS.get(i);
+ for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) {
+ Version v = ALL_VERSIONS.get(i);
if (v.before(version)) {
return v;
}
@@ -223,7 +225,7 @@ public static Version getPreviousVersion(Version version) {
}
/**
- * Get the released version before {@link Version#CURRENT}.
+ * Get the version before {@link Version#CURRENT}.
*/
public static Version getPreviousVersion() {
Version version = getPreviousVersion(Version.CURRENT);
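getPreviousVersion now scans ALL_VERSIONS, which is sorted ascending, from the end and returns the first entry strictly before the argument, so unreleased versions also qualify. The backwards scan in isolation, sketched over plain integers:

import java.util.List;

final class PreviousElement {
    // Largest element strictly less than bound, assuming ascending order;
    // the same shape as VersionUtils.getPreviousVersion above.
    static int previous(List<Integer> ascending, int bound) {
        for (int i = ascending.size() - 1; i >= 0; i--) {
            int v = ascending.get(i);
            if (v < bound) {
                return v;
            }
        }
        throw new IllegalArgumentException("couldn't find any element before [" + bound + "]");
    }
}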
diff --git a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java
index 668526d9d6d0d..3d5a906e50836 100644
--- a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java
+++ b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java
@@ -200,7 +200,7 @@ private ReproduceErrorMessageBuilder appendESProperties() {
public ReproduceErrorMessageBuilder appendClientYamlSuiteProperties() {
return appendProperties(
OpenSearchClientYamlSuiteTestCase.REST_TESTS_SUITE,
- OpenSearchClientYamlSuiteTestCase.REST_TESTS_BLACKLIST
+ OpenSearchClientYamlSuiteTestCase.REST_TESTS_DENYLIST
);
}
diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
index 27369e79e5dee..f976b3619102a 100644
--- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
@@ -531,7 +531,8 @@ protected boolean waitForAllSnapshotsWiped() {
private void wipeCluster() throws Exception {
// Clean up SLM policies before trying to wipe snapshots so that no new ones get started by SLM after wiping
- if (nodeVersions.first().onOrAfter(LegacyESVersion.V_7_4_0)) { // SLM was introduced in version 7.4
+ if (nodeVersions.first().onOrAfter(LegacyESVersion.V_7_4_0) && nodeVersions.first().before(Version.V_1_0_0)) { // SLM was introduced
+ // in version 7.4
if (preserveSLMPoliciesUponCompletion() == false) {
// Clean up SLM policies before trying to wipe snapshots so that no new ones get started by SLM after wiping
deleteAllSLMPolicies();
@@ -823,7 +824,8 @@ protected String getProtocol() {
protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException {
RestClientBuilder builder = RestClient.builder(hosts);
configureClient(builder, settings);
- builder.setStrictDeprecationMode(true);
+ // TODO: set the method argument to 'true' after PR https://github.com/opensearch-project/OpenSearch/pull/2683 is merged.
+ builder.setStrictDeprecationMode(false);
return builder.build();
}
diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java
similarity index 97%
rename from test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java
rename to test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java
index 15510e368b1f5..eeaa76b6ca1b3 100644
--- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java
+++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java
@@ -47,7 +47,7 @@
*
* Each denylist pattern is a suffix match on the path. Empty patterns are not allowed.
*/
-final class BlacklistedPathPatternMatcher {
+final class DenylistedPathPatternMatcher {
private final Pattern pattern;
/**
@@ -55,7 +55,7 @@ final class BlacklistedPathPatternMatcher {
*
* @param p The suffix pattern. Must be a non-empty string.
*/
- BlacklistedPathPatternMatcher(String p) {
+ DenylistedPathPatternMatcher(String p) {
// guard against accidentally matching everything as an empty string lead to the pattern ".*" which matches everything
if (p == null || p.trim().isEmpty()) {
throw new IllegalArgumentException("Empty denylist patterns are not supported");
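The matcher turns a glob-style pattern into a regex and tests it as a suffix of the candidate path. A rough, simplified sketch of that compilation (single-segment * only; the real class also handles multi-segment globs):

import java.util.regex.Pattern;

final class SuffixGlob {
    private final Pattern pattern;

    // Compile a glob where '*' matches within one path segment, anchored as a
    // suffix match, similar in spirit to DenylistedPathPatternMatcher.
    SuffixGlob(String glob) {
        if (glob == null || glob.trim().isEmpty()) {
            throw new IllegalArgumentException("Empty denylist patterns are not supported");
        }
        // Quote the literal parts, then re-open the quote around each '*'.
        String regex = ".*" + Pattern.quote(glob).replace("*", "\\E[^/]*\\Q");
        this.pattern = Pattern.compile(regex);
    }

    boolean isSuffixMatch(String path) {
        return pattern.matcher(path).matches();
    }
}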
diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java
index 5a404ccd4b9fc..1b19f03f46174 100644
--- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java
@@ -89,14 +89,14 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe
public static final String REST_TESTS_SUITE = "tests.rest.suite";
/**
* Property that allows to denylist some of the REST tests based on a comma separated list of globs
- * e.g. "-Dtests.rest.blacklist=get/10_basic/*"
+ * e.g. "-Dtests.rest.denylist=get/10_basic/*"
*/
- public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";
+ public static final String REST_TESTS_DENYLIST = "tests.rest.denylist";
/**
- * We use tests.rest.blacklist in build files to denylist tests; this property enables a user to add additional denylisted tests on
+ * We use tests.rest.denylist in build files to denylist tests; this property enables a user to add additional denylisted tests on
* top of the tests denylisted in the build.
*/
- public static final String REST_TESTS_BLACKLIST_ADDITIONS = "tests.rest.blacklist_additions";
+ public static final String REST_TESTS_DENYLIST_ADDITIONS = "tests.rest.denylist_additions";
/**
* Property that allows to control whether spec validation is enabled or not (default true).
*/
@@ -116,7 +116,7 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe
*/
private static final String PATHS_SEPARATOR = "(? denylistPathMatchers;
+ private static List denylistPathMatchers;
private static ClientYamlTestExecutionContext restTestExecutionContext;
private static ClientYamlTestExecutionContext adminExecutionContext;
private static ClientYamlTestClient clientYamlTestClient;
@@ -154,14 +154,14 @@ public void initAndResetContext() throws Exception {
clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, minVersion, masterVersion);
restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType());
adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false);
- final String[] denylist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
+ final String[] denylist = resolvePathsProperty(REST_TESTS_DENYLIST, null);
denylistPathMatchers = new ArrayList<>();
for (final String entry : denylist) {
- denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
+ denylistPathMatchers.add(new DenylistedPathPatternMatcher(entry));
}
- final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null);
+ final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_DENYLIST_ADDITIONS, null);
for (final String entry : denylistAdditions) {
- denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
+ denylistPathMatchers.add(new DenylistedPathPatternMatcher(entry));
}
}
assert restTestExecutionContext != null;
@@ -368,12 +368,9 @@ protected RequestOptions getCatNodesVersionMasterRequestOptions() {
public void test() throws IOException {
// skip test if it matches one of the denylist globs
- for (BlacklistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) {
+ for (DenylistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) {
String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName();
- assumeFalse(
- "[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted",
- denylistedPathMatcher.isSuffixMatch(testPath)
- );
+ assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: denylisted", denylistedPathMatcher.isSuffixMatch(testPath));
}
// skip test if the whole suite (yaml file) is disabled
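Denylist entries arrive as one system property value split on commas; PATHS_SEPARATOR uses a negative look-behind so an escaped comma stays inside a single entry. A sketch of that split:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class DenylistProperty {
    // Split on every comma not preceded by a backslash, matching the
    // PATHS_SEPARATOR = "(?<!\\\\)," convention above.
    static List<String> resolve(String propertyValue) {
        if (propertyValue == null || propertyValue.isEmpty()) {
            return Collections.emptyList();
        }
        return Arrays.asList(propertyValue.split("(?<!\\\\),"));
    }
}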
diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
index ec88cd0201db5..e4b98124ea441 100644
--- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
@@ -107,7 +107,6 @@
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
-import static org.opensearch.transport.TransportHandshaker.V_3_0_0;
import static org.opensearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
@@ -2227,11 +2226,7 @@ public void testHandshakeUpdatesVersion() throws IOException {
TransportRequestOptions.Type.STATE
);
try (Transport.Connection connection = serviceA.openConnection(node, builder.build())) {
- // OpenSearch [1.0:3.0) in bwc mode should only "upgrade" to Legacy v7.10.2
- assertEquals(
- connection.getVersion(),
- version.onOrAfter(Version.V_1_0_0) && version.before(V_3_0_0) ? LegacyESVersion.V_7_10_2 : version
- );
+ assertEquals(version, connection.getVersion());
}
}
}
@@ -2276,9 +2271,7 @@ public void testTcpHandshake() {
PlainActionFuture<Transport.Connection> future = PlainActionFuture.newFuture();
serviceA.getOriginalTransport().openConnection(node, connectionProfile, future);
try (Transport.Connection connection = future.actionGet()) {
- // OpenSearch sends a handshake version spoofed as Legacy version 7_10_2
- // todo change for OpenSearch 3.0.0 when Legacy compatibility is removed
- assertEquals(LegacyESVersion.V_7_10_2, connection.getVersion());
+ assertEquals(Version.CURRENT, connection.getVersion());
}
}
}
@@ -2624,8 +2617,8 @@ public String executor() {
TransportStats transportStats = serviceC.transport.getStats(); // we did a single round-trip to do the initial handshake
assertEquals(1, transportStats.getRxCount());
assertEquals(1, transportStats.getTxCount());
- assertEquals(25, transportStats.getRxSize().getBytes());
- assertEquals(51, transportStats.getTxSize().getBytes());
+ assertEquals(29, transportStats.getRxSize().getBytes());
+ assertEquals(55, transportStats.getTxSize().getBytes());
});
serviceC.sendRequest(
connection,
@@ -2639,16 +2632,16 @@ public String executor() {
TransportStats transportStats = serviceC.transport.getStats(); // request has ben send
assertEquals(1, transportStats.getRxCount());
assertEquals(2, transportStats.getTxCount());
- assertEquals(25, transportStats.getRxSize().getBytes());
- assertEquals(111, transportStats.getTxSize().getBytes());
+ assertEquals(29, transportStats.getRxSize().getBytes());
+ assertEquals(115, transportStats.getTxSize().getBytes());
});
sendResponseLatch.countDown();
responseLatch.await();
stats = serviceC.transport.getStats(); // response has been received
assertEquals(2, stats.getRxCount());
assertEquals(2, stats.getTxCount());
- assertEquals(50, stats.getRxSize().getBytes());
- assertEquals(111, stats.getTxSize().getBytes());
+ assertEquals(54, stats.getRxSize().getBytes());
+ assertEquals(115, stats.getTxSize().getBytes());
} finally {
serviceC.close();
}
@@ -2745,8 +2738,8 @@ public String executor() {
TransportStats transportStats = serviceC.transport.getStats(); // request has been sent
assertEquals(1, transportStats.getRxCount());
assertEquals(1, transportStats.getTxCount());
- assertEquals(25, transportStats.getRxSize().getBytes());
- assertEquals(51, transportStats.getTxSize().getBytes());
+ assertEquals(29, transportStats.getRxSize().getBytes());
+ assertEquals(55, transportStats.getTxSize().getBytes());
});
serviceC.sendRequest(
connection,
@@ -2760,8 +2753,8 @@ public String executor() {
TransportStats transportStats = serviceC.transport.getStats(); // request has been sent
assertEquals(1, transportStats.getRxCount());
assertEquals(2, transportStats.getTxCount());
- assertEquals(25, transportStats.getRxSize().getBytes());
- assertEquals(111, transportStats.getTxSize().getBytes());
+ assertEquals(29, transportStats.getRxSize().getBytes());
+ assertEquals(115, transportStats.getTxSize().getBytes());
});
sendResponseLatch.countDown();
responseLatch.await();
@@ -2773,10 +2766,10 @@ public String executor() {
BytesStreamOutput streamOutput = new BytesStreamOutput();
exception.writeTo(streamOutput);
String failedMessage = "Unexpected read bytes size. The transport exception that was received=" + exception;
- // 53 bytes are the non-exception message bytes that have been received. It should include the initial
+ // 57 bytes are the non-exception message bytes that have been received. It should include the initial
// handshake message and the header, version, etc bytes in the exception message.
- assertEquals(failedMessage, 53 + streamOutput.bytes().length(), stats.getRxSize().getBytes());
- assertEquals(111, stats.getTxSize().getBytes());
+ assertEquals(failedMessage, 57 + streamOutput.bytes().length(), stats.getRxSize().getBytes());
+ assertEquals(115, stats.getTxSize().getBytes());
} finally {
serviceC.close();
}
diff --git a/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java
index 7d8fb4a318621..d007547ba0918 100644
--- a/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java
+++ b/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java
@@ -31,7 +31,6 @@
package org.opensearch.test;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.Booleans;
import org.opensearch.common.collect.Tuple;
@@ -76,21 +75,21 @@ public void testRandomVersionBetween() {
assertTrue(got.onOrBefore(Version.CURRENT));
// sub range
- got = VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromId(7000099), LegacyESVersion.fromId(7010099));
- assertTrue(got.onOrAfter(LegacyESVersion.fromId(7000099)));
- assertTrue(got.onOrBefore(LegacyESVersion.fromId(7010099)));
+ got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_1_0);
+ assertTrue(got.onOrAfter(Version.V_2_0_0));
+ assertTrue(got.onOrBefore(Version.V_2_1_0));
// unbounded lower
- got = VersionUtils.randomVersionBetween(random(), null, LegacyESVersion.fromId(7000099));
+ got = VersionUtils.randomVersionBetween(random(), null, Version.V_2_0_0);
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
- assertTrue(got.onOrBefore(LegacyESVersion.fromId(7000099)));
+ assertTrue(got.onOrBefore(Version.V_2_0_0));
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0));
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0)));
// unbounded upper
- got = VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromId(7000099), null);
- assertTrue(got.onOrAfter(LegacyESVersion.fromId(7000099)));
+ got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, null);
+ assertTrue(got.onOrAfter(Version.V_2_0_0));
assertTrue(got.onOrBefore(Version.CURRENT));
got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null);
assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion()));
@@ -101,8 +100,8 @@ public void testRandomVersionBetween() {
assertEquals(got, VersionUtils.getFirstVersion());
got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
assertEquals(got, Version.CURRENT);
- got = VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromId(7000099), LegacyESVersion.fromId(7000099));
- assertEquals(got, LegacyESVersion.fromId(7000099));
+ got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_0_0);
+ assertEquals(got, Version.V_2_0_0);
// implicit range of one
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());
@@ -284,7 +283,6 @@ public void testResolveReleasedVersionsAtNewMinorBranchIn2x() {
Arrays.asList(
TestNewMinorBranchIn6x.V_1_6_0,
TestNewMinorBranchIn6x.V_1_6_1,
- TestNewMinorBranchIn6x.V_1_6_2,
TestNewMinorBranchIn6x.V_2_0_0,
TestNewMinorBranchIn6x.V_2_0_1,
TestNewMinorBranchIn6x.V_2_1_0,
@@ -292,7 +290,10 @@ public void testResolveReleasedVersionsAtNewMinorBranchIn2x() {
)
)
);
- assertThat(unreleased, equalTo(Arrays.asList(TestNewMinorBranchIn6x.V_2_1_2, TestNewMinorBranchIn6x.V_2_2_0)));
+ assertThat(
+ unreleased,
+ equalTo(Arrays.asList(TestNewMinorBranchIn6x.V_1_6_2, TestNewMinorBranchIn6x.V_2_1_2, TestNewMinorBranchIn6x.V_2_2_0))
+ );
}
/**
diff --git a/test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java b/test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java
similarity index 93%
rename from test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java
rename to test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java
index 05cdec242e565..3d62f399fe271 100644
--- a/test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java
+++ b/test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java
@@ -33,7 +33,7 @@
import org.opensearch.test.OpenSearchTestCase;
-public class BlacklistedPathPatternMatcherTests extends OpenSearchTestCase {
+public class DenylistedPathPatternMatcherTests extends OpenSearchTestCase {
public void testMatchesExact() {
// suffix match
@@ -71,12 +71,12 @@ public void testMatchesMixedPatterns() {
}
private void assertMatch(String pattern, String path) {
- BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern);
+ DenylistedPathPatternMatcher matcher = new DenylistedPathPatternMatcher(pattern);
assertTrue("Pattern [" + pattern + "] should have matched path [" + path + "]", matcher.isSuffixMatch(path));
}
private void assertNoMatch(String pattern, String path) {
- BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern);
+ DenylistedPathPatternMatcher matcher = new DenylistedPathPatternMatcher(pattern);
assertFalse("Pattern [" + pattern + "] should not have matched path [" + path + "]", matcher.isSuffixMatch(path));
}
}
diff --git a/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java b/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java
index 2735128a4583d..5cdeeb70c1950 100644
--- a/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java
+++ b/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java
@@ -48,21 +48,21 @@
public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testSkipMultiRange() {
- SkipSection section = new SkipSection("6.0.0 - 6.1.0, 7.1.0 - 7.5.0", Collections.emptyList(), "foobar");
+ SkipSection section = new SkipSection("1.0.0 - 1.1.0, 2.1.0 - 2.5.0", Collections.emptyList(), "foobar");
assertFalse(section.skip(Version.CURRENT));
- assertFalse(section.skip(Version.fromString("6.2.0")));
- assertFalse(section.skip(Version.fromString("7.0.0")));
- assertFalse(section.skip(Version.fromString("7.6.0")));
-
- assertTrue(section.skip(Version.fromString("6.0.0")));
- assertTrue(section.skip(Version.fromString("6.1.0")));
- assertTrue(section.skip(Version.fromString("7.1.0")));
- assertTrue(section.skip(Version.fromString("7.5.0")));
-
- section = new SkipSection("- 7.1.0, 7.2.0 - 7.5.0", Collections.emptyList(), "foobar");
- assertTrue(section.skip(Version.fromString("7.0.0")));
- assertTrue(section.skip(Version.fromString("7.3.0")));
+ assertFalse(section.skip(Version.fromString("1.2.0")));
+ assertFalse(section.skip(Version.fromString("2.0.0")));
+ assertFalse(section.skip(Version.fromString("2.6.0")));
+
+ assertTrue(section.skip(Version.fromString("1.0.0")));
+ assertTrue(section.skip(Version.fromString("1.1.0")));
+ assertTrue(section.skip(Version.fromString("2.1.0")));
+ assertTrue(section.skip(Version.fromString("2.5.0")));
+
+ section = new SkipSection("- 2.1.0, 2.2.0 - 2.5.0", Collections.emptyList(), "foobar");
+ assertTrue(section.skip(Version.fromString("2.0.0")));
+ assertTrue(section.skip(Version.fromString("2.3.0")));
}
public void testSkip() {
diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle
index ecfccc9338410..3154e556a87cf 100644
--- a/test/logger-usage/build.gradle
+++ b/test/logger-usage/build.gradle
@@ -33,7 +33,7 @@ apply plugin: 'opensearch.java'
dependencies {
api 'org.ow2.asm:asm:9.2'
api 'org.ow2.asm:asm-tree:9.2'
- api 'org.ow2.asm:asm-analysis:9.2'
+ api 'org.ow2.asm:asm-analysis:9.3'
api "org.apache.logging.log4j:log4j-api:${versions.log4j}"
testImplementation project(":test:framework")
}
diff --git a/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 b/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1
new file mode 100644
index 0000000000000..f40f0242448e8
--- /dev/null
+++ b/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1
@@ -0,0 +1 @@
+4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file